Compare commits

...

10 Commits

Author SHA1 Message Date
1fbae343a4 Fix RFC3339 serialization 2023-06-06 11:26:46 +02:00
31418bf0e6 v0.0.130 2023-06-05 13:30:32 +02:00
6d45f6f667 v0.0.129 2023-06-05 13:24:52 +02:00
f610a2202c v0.0.128 2023-06-02 09:44:31 +02:00
2807299d46 v0.0.127 2023-05-28 22:55:06 +02:00
e872dbccec v0.0.126 2023-05-28 19:53:30 +02:00
9daf71e2ed v0.0.125 2023-05-28 19:41:24 +02:00
fe278f7772 v0.0.124 2023-05-28 18:21:02 +02:00
8ebda6fb3a v0.0.123 2023-05-25 18:20:31 +02:00
b0d3ce8c1c v0.0.122 2023-05-24 22:01:29 +02:00
11 changed files with 790 additions and 4 deletions


@@ -3,7 +3,9 @@ run:
echo "This is a library - can't be run" && false echo "This is a library - can't be run" && false
test: test:
go test ./... # go test ./...
which gotestsum || go install gotest.tools/gotestsum@latest
gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./test"
version: version:
_data/version.sh _data/version.sh

bfcodegen/enum-generate.go (new file, 318 lines)

@@ -0,0 +1,318 @@
package bfcodegen
import (
"errors"
"fmt"
"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"io"
"os"
"regexp"
"strings"
"time"
)
type EnumDefVal struct {
VarName string
Value string
Description *string
}
type EnumDef struct {
File string
EnumTypeName string
Type string
Values []EnumDefVal
}
var rexPackage = rext.W(regexp.MustCompile("^package\\s+(?P<name>[A-Za-z0-9_]+)\\s*$"))
var rexEnumDef = rext.W(regexp.MustCompile("^\\s*type\\s+(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*//\\s*(@enum:type).*$"))
var rexValueDef = rext.W(regexp.MustCompile("^\\s*(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*=\\s*(?P<value>(\"[A-Za-z0-9_:]+\"|[0-9]+))\\s*(//(?P<descr>.*))?.*$"))
func GenerateEnumSpecs(sourceDir string, destFile string) error {
files, err := os.ReadDir(sourceDir)
if err != nil {
return err
}
allEnums := make([]EnumDef, 0)
pkgname := ""
for _, f := range files {
if !strings.HasSuffix(f.Name(), ".go") {
continue
}
fmt.Printf("========= %s =========\n\n", f.Name())
fileEnums, pn, err := processFile(filepath.Join(sourceDir, f.Name()))
if err != nil {
return err
}
fmt.Printf("\n")
allEnums = append(allEnums, fileEnums...)
if pn != "" {
pkgname = pn
}
}
if pkgname == "" {
return errors.New("no package name found in any file")
}
err = os.WriteFile(destFile, []byte(fmtOutput(allEnums, pkgname)), 0o755)
if err != nil {
return err
}
res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second))
if err != nil {
return err
}
if res.CommandTimedOut {
fmt.Println(res.StdCombined)
return errors.New("go fmt timed out")
}
if res.ExitCode != 0 {
fmt.Println(res.StdCombined)
return errors.New("go fmt did not succeed")
}
return nil
}
func processFile(fn string) ([]EnumDef, string, error) {
file, err := os.Open(fn)
if err != nil {
return nil, "", err
}
defer func() { _ = file.Close() }()
bin, err := io.ReadAll(file)
if err != nil {
return nil, "", err
}
lines := strings.Split(string(bin), "\n")
enums := make([]EnumDef, 0)
pkgname := ""
for i, line := range lines {
if i == 0 && strings.HasPrefix(line, "// Code generated by") {
break
}
if match, ok := rexPackage.MatchFirst(line); i == 0 && ok {
pkgname = match.GroupByName("name").Value()
continue
}
if match, ok := rexEnumDef.MatchFirst(line); ok {
def := EnumDef{
File: fn,
EnumTypeName: match.GroupByName("name").Value(),
Type: match.GroupByName("type").Value(),
Values: make([]EnumDefVal, 0),
}
enums = append(enums, def)
fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type)
}
if match, ok := rexValueDef.MatchFirst(line); ok {
typename := match.GroupByName("type").Value()
def := EnumDefVal{
VarName: match.GroupByName("name").Value(),
Value: match.GroupByName("value").Value(),
Description: match.GroupByNameOrEmpty("descr").ValueOrNil(),
}
found := false
for i, v := range enums {
if v.EnumTypeName == typename {
enums[i].Values = append(enums[i].Values, def)
found = true
if def.Description != nil {
fmt.Printf("Found enum value [%s] for '%s' ('%s')\n", def.Value, def.VarName, *def.Description)
} else {
fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName)
}
break
}
}
if !found {
fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName)
}
}
}
return enums, pkgname, nil
}
func fmtOutput(enums []EnumDef, pkgname string) string {
str := "// Code generated by permissions_gen.sh DO NOT EDIT.\n"
str += "\n"
str += "package " + pkgname + "\n"
str += "\n"
str += "import \"gogs.mikescher.com/BlackForestBytes/goext/langext\"" + "\n"
str += "\n"
str += "type Enum interface {" + "\n"
str += " Valid() bool" + "\n"
str += " ValuesAny() []any" + "\n"
str += " ValuesMeta() []EnumMetaValue" + "\n"
str += " VarName() string" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "type StringEnum interface {" + "\n"
str += " Enum" + "\n"
str += " String() string" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "type DescriptionEnum interface {" + "\n"
str += " Enum" + "\n"
str += " Description() string" + "\n"
str += "}" + "\n"
str += "\n"
str += "type EnumMetaValue struct {" + "\n"
str += " VarName string `json:\"varName\"`" + "\n"
str += " Value any `json:\"value\"`" + "\n"
str += " Description *string `json:\"description\"`" + "\n"
str += "}" + "\n"
str += "\n"
for _, enumdef := range enums {
hasDescr := langext.ArrAll(enumdef.Values, func(val EnumDefVal) bool { return val.Description != nil })
hasStr := enumdef.Type == "string"
str += "// ================================ " + enumdef.EnumTypeName + " ================================" + "\n"
str += "//" + "\n"
str += "// File: " + enumdef.File + "\n"
str += "// StringEnum: " + langext.Conditional(hasStr, "true", "false") + "\n"
str += "// DescrEnum: " + langext.Conditional(hasDescr, "true", "false") + "\n"
str += "//" + "\n"
str += "" + "\n"
str += "var __" + enumdef.EnumTypeName + "Values = []" + enumdef.EnumTypeName + "{" + "\n"
for _, v := range enumdef.Values {
str += " " + v.VarName + "," + "\n"
}
str += "}" + "\n"
str += "" + "\n"
if hasDescr {
str += "var __" + enumdef.EnumTypeName + "Descriptions = map[" + enumdef.EnumTypeName + "]string{" + "\n"
for _, v := range enumdef.Values {
str += " " + v.VarName + ": \"" + strings.TrimSpace(*v.Description) + "\"," + "\n"
}
str += "}" + "\n"
str += "" + "\n"
}
str += "var __" + enumdef.EnumTypeName + "Varnames = map[" + enumdef.EnumTypeName + "]string{" + "\n"
for _, v := range enumdef.Values {
str += " " + v.VarName + ": \"" + v.VarName + "\"," + "\n"
}
str += "}" + "\n"
str += "" + "\n"
str += "func (e " + enumdef.EnumTypeName + ") Valid() bool {" + "\n"
str += " return langext.InArray(e, __" + enumdef.EnumTypeName + "Values)" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func (e " + enumdef.EnumTypeName + ") Values() []" + enumdef.EnumTypeName + " {" + "\n"
str += " return __" + enumdef.EnumTypeName + "Values" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func (e " + enumdef.EnumTypeName + ") ValuesAny() []any {" + "\n"
str += " return langext.ArrCastToAny(__" + enumdef.EnumTypeName + "Values)" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func (e " + enumdef.EnumTypeName + ") ValuesMeta() []EnumMetaValue {" + "\n"
str += " return []EnumMetaValue{" + "\n"
for _, v := range enumdef.Values {
if hasDescr {
str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: langext.Ptr(\"%s\")},", v.VarName, v.VarName, strings.TrimSpace(*v.Description)) + "\n"
} else {
str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: nil},", v.VarName, v.VarName) + "\n"
}
}
str += " }" + "\n"
str += "}" + "\n"
str += "" + "\n"
if hasStr {
str += "func (e " + enumdef.EnumTypeName + ") String() string {" + "\n"
str += " return string(e)" + "\n"
str += "}" + "\n"
str += "" + "\n"
}
if hasDescr {
str += "func (e " + enumdef.EnumTypeName + ") Description() string {" + "\n"
str += " if d, ok := __" + enumdef.EnumTypeName + "Descriptions[e]; ok {" + "\n"
str += " return d" + "\n"
str += " }" + "\n"
str += " return \"\"" + "\n"
str += "}" + "\n"
str += "" + "\n"
}
str += "func (e " + enumdef.EnumTypeName + ") VarName() string {" + "\n"
str += " if d, ok := __" + enumdef.EnumTypeName + "Varnames[e]; ok {" + "\n"
str += " return d" + "\n"
str += " }" + "\n"
str += " return \"\"" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func Parse" + enumdef.EnumTypeName + "(vv string) (" + enumdef.EnumTypeName + ", bool) {" + "\n"
str += " for _, ev := range __" + enumdef.EnumTypeName + "Values {" + "\n"
str += " if string(ev) == vv {" + "\n"
str += " return ev, true" + "\n"
str += " }" + "\n"
str += " }" + "\n"
str += " return \"\", false" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func " + enumdef.EnumTypeName + "Values() []" + enumdef.EnumTypeName + " {" + "\n"
str += " return __" + enumdef.EnumTypeName + "Values" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func " + enumdef.EnumTypeName + "ValuesMeta() []EnumMetaValue {" + "\n"
str += " return []EnumMetaValue{" + "\n"
for _, v := range enumdef.Values {
if hasDescr {
str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: langext.Ptr(\"%s\")},", v.VarName, v.VarName, strings.TrimSpace(*v.Description)) + "\n"
} else {
str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: nil},", v.VarName, v.VarName) + "\n"
}
}
str += " }" + "\n"
str += "}" + "\n"
str += "" + "\n"
}
return str
}
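
For reference, a minimal source file that the generator above would pick up could look like the sketch below (hypothetical package, type and value names; the // @enum:type marker and the trailing value comments follow rexEnumDef and rexValueDef):

package models

type JobState string // @enum:type

const (
    JobStateQueued  JobState = "QUEUED"  // job waiting for a worker
    JobStateRunning JobState = "RUNNING" // job currently executing
    JobStateFailed  JobState = "FAILED"  // job finished with an error
)

Running GenerateEnumSpecs on the directory containing such a file would emit the Valid/ValuesAny/ValuesMeta/VarName methods (plus String and Description where applicable) and a ParseJobState function into the destination file.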


@@ -8,6 +8,7 @@ import (
"os" "os"
"reflect" "reflect"
"strconv" "strconv"
"strings"
"time" "time"
) )
@@ -172,6 +173,20 @@ func parseEnvToValue(envval string, fullEnvKey string, rvtype reflect.Type) (ref
return envcvl, nil
} else if rvtype.ConvertibleTo(reflect.TypeOf(false)) {
if strings.TrimSpace(strings.ToLower(envval)) == "true" {
return reflect.ValueOf(true).Convert(rvtype), nil
} else if strings.TrimSpace(strings.ToLower(envval)) == "false" {
return reflect.ValueOf(false).Convert(rvtype), nil
} else if strings.TrimSpace(strings.ToLower(envval)) == "1" {
return reflect.ValueOf(true).Convert(rvtype), nil
} else if strings.TrimSpace(strings.ToLower(envval)) == "0" {
return reflect.ValueOf(false).Convert(rvtype), nil
} else {
return reflect.Value{}, errors.New(fmt.Sprintf("Failed to parse env-config variable '%s' to <%s, bool> (value := '%s')", rvtype.Name(), fullEnvKey, envval))
}
} else if rvtype.ConvertibleTo(reflect.TypeOf("")) {
envcvl := reflect.ValueOf(envval).Convert(rvtype)


@@ -68,6 +68,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
V7 aliasstring `env:"TEST_V7"`
V8 time.Duration `env:"TEST_V8"`
V9 time.Time `env:"TEST_V9"`
VA bool `env:"TEST_VA"`
}
data := testdata{
@@ -82,6 +83,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
V7: "7",
V8: 9,
V9: time.Unix(1671102873, 0),
VA: false,
}
t.Setenv("TEST_V1", "846")
@@ -93,6 +95,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
t.Setenv("TEST_V7", "AAAAAA")
t.Setenv("TEST_V8", "1min4s")
t.Setenv("TEST_V9", "2009-11-10T23:00:00Z")
t.Setenv("TEST_VA", "true")
err := ApplyEnvOverrides("", &data, ".")
if err != nil {
@@ -109,6 +112,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
tst.AssertEqual(t, data.V7, "AAAAAA")
tst.AssertEqual(t, data.V8, time.Second*64)
tst.AssertEqual(t, data.V9, time.Unix(1257894000, 0).UTC())
tst.AssertEqual(t, data.VA, true)
}
func TestApplyEnvOverridesRecursive(t *testing.T) {


@@ -1,5 +1,10 @@
package langext
type MapEntry[T comparable, V any] struct {
Key T
Value V
}
func MapKeyArr[T comparable, V any](v map[T]V) []T {
result := make([]T, 0, len(v))
for k := range v {
@@ -8,6 +13,14 @@ func MapKeyArr[T comparable, V any](v map[T]V) []T {
return result
}
func MapValueArr[T comparable, V any](v map[T]V) []V {
result := make([]V, 0, len(v))
for _, mv := range v {
result = append(result, mv)
}
return result
}
func ArrToMap[T comparable, V any](a []V, keyfunc func(V) T) map[T]V {
result := make(map[T]V, len(a))
for _, v := range a {
@@ -16,6 +29,17 @@ func ArrToMap[T comparable, V any](a []V, keyfunc func(V) T) map[T]V {
return result
}
func MapToArr[T comparable, V any](v map[T]V) []MapEntry[T, V] {
result := make([]MapEntry[T, V], 0, len(v))
for mk, mv := range v {
result = append(result, MapEntry[T, V]{
Key: mk,
Value: mv,
})
}
return result
}
func CopyMap[K comparable, V any](a map[K]V) map[K]V {
result := make(map[K]V, len(a))
for k, v := range a {
@@ -23,3 +47,11 @@ func CopyMap[K comparable, V any](a map[K]V) map[K]V {
}
return result
}
func ForceMap[K comparable, V any](v map[K]V) map[K]V {
if v == nil {
return make(map[K]V, 0)
} else {
return v
}
}
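
A short usage sketch of the new map helpers above (standalone example; map iteration order, and therefore the printed ordering, is not deterministic):

package main

import (
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
    m := map[string]int{"a": 1, "b": 2}

    fmt.Println(langext.MapKeyArr(m))   // keys of m, e.g. [a b]
    fmt.Println(langext.MapValueArr(m)) // values of m, e.g. [1 2]
    fmt.Println(langext.MapToArr(m))    // []MapEntry[string, int], e.g. [{a 1} {b 2}]

    var nilMap map[string]int
    safe := langext.ForceMap(nilMap) // never nil, safe to write into
    safe["c"] = 3
    fmt.Println(safe) // map[c:3]
}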


@@ -4,6 +4,12 @@ import (
"reflect" "reflect"
) )
// PTrue := &true
var PTrue = Ptr(true)
// PFalse := &false
var PFalse = Ptr(false)
func Ptr[T any](v T) *T {
return &v
}


@@ -41,6 +41,14 @@ func NewHexUUID() (string, error) {
return string(dst), nil
}
func MustHexUUID() string {
v, err := NewHexUUID()
if err != nil {
panic(err)
}
return v
}
func NewUpperHexUUID() (string, error) {
uuid, err := NewUUID()
if err != nil {
@@ -64,6 +72,14 @@ func NewUpperHexUUID() (string, error) {
return strings.ToUpper(string(dst)), nil
}
func MustUpperHexUUID() string {
v, err := NewUpperHexUUID()
if err != nil {
panic(err)
}
return v
}
func NewRawHexUUID() (string, error) {
uuid, err := NewUUID()
if err != nil {
@@ -83,6 +99,14 @@ func NewRawHexUUID() (string, error) {
return strings.ToUpper(string(dst)), nil
}
func MustRawHexUUID() string {
v, err := NewRawHexUUID()
if err != nil {
panic(err)
}
return v
}
func NewBracesUUID() (string, error) {
uuid, err := NewUUID()
if err != nil {
@@ -108,6 +132,14 @@ func NewBracesUUID() (string, error) {
return strings.ToUpper(string(dst)), nil
}
func MustBracesUUID() string {
v, err := NewBracesUUID()
if err != nil {
panic(err)
}
return v
}
func NewParensUUID() (string, error) {
uuid, err := NewUUID()
if err != nil {
@@ -132,3 +164,11 @@ func NewParensUUID() (string, error) {
return strings.ToUpper(string(dst)), nil
}
func MustParensUUID() string {
v, err := NewParensUUID()
if err != nil {
panic(err)
}
return v
}


@@ -81,7 +81,7 @@ func (t *RFC3339Time) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
return errors.New(fmt.Sprintf("cannot unmarshal %v into RFC3339Time", bt))
}
var tt time.Time
err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) // previously: err := bson.Unmarshal(data, &tt)
if err != nil {
return err
}
@@ -116,6 +116,12 @@ func (t RFC3339Time) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueRead
return err
}
if val.Kind() == reflect.Ptr {
val.Set(reflect.ValueOf(&t))
} else {
val.Set(reflect.ValueOf(t))
}
return nil
}

sq/converter.go (new file, 91 lines)

@@ -0,0 +1,91 @@
package sq
import (
"errors"
"fmt"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"time"
)
// TODO UNFINISHED
// this is not finished yet
// the idea is that we can register converters in the database struct,
// they get inherited by the transactions,
// and when unmarshaling (sq.Query | sq.QueryAll)
// or marshaling (sq.InsertSingle)
// the types get converted automatically...
type DBTypeConverter interface {
ModelTypeString() string
DBTypeString() string
ModelToDB(v any) (any, error)
DBToModel(v any) (any, error)
}
var ConverterBoolToBit = NewDBTypeConverter[bool, int](func(v bool) (int, error) {
return langext.Conditional(v, 1, 0), nil
}, func(v int) (bool, error) {
if v == 0 {
return false, nil
}
if v == 1 {
return true, nil
}
return false, errors.New(fmt.Sprintf("invalid value for boolean: '%d'", v))
})
var ConverterTimeToUnixMillis = NewDBTypeConverter[time.Time, int64](func(v time.Time) (int64, error) {
return v.UnixMilli(), nil
}, func(v int64) (time.Time, error) {
return time.UnixMilli(v), nil
})
var ConverterOptTimeToUnixMillis = NewDBTypeConverter[*time.Time, *int64](func(v *time.Time) (*int64, error) {
if v == nil {
return nil, nil
}
return langext.Ptr(v.UnixMilli()), nil
}, func(v *int64) (*time.Time, error) {
if v == nil {
return nil, nil
}
return langext.Ptr(time.UnixMilli(*v)), nil
})
type dbTypeConverterImpl[TModelData any, TDBData any] struct {
dbTypeString string
modelTypeString string
todb func(v TModelData) (TDBData, error)
tomodel func(v TDBData) (TModelData, error)
}
func (t *dbTypeConverterImpl[TModelData, TDBData]) ModelTypeString() string {
return t.modelTypeString
}
func (t *dbTypeConverterImpl[TModelData, TDBData]) DBTypeString() string {
return t.dbTypeString
}
func (t *dbTypeConverterImpl[TModelData, TDBData]) ModelToDB(v any) (any, error) {
if vv, ok := v.(TModelData); ok {
return t.todb(vv)
}
return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.modelTypeString, v))
}
func (t *dbTypeConverterImpl[TModelData, TDBData]) DBToModel(v any) (any, error) {
if vv, ok := v.(TDBData); ok {
return t.tomodel(vv)
}
return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.dbTypeString, v))
}
func NewDBTypeConverter[TModelData any, TDBData any](todb func(v TModelData) (TDBData, error), tomodel func(v TDBData) (TModelData, error)) DBTypeConverter {
return &dbTypeConverterImpl[TModelData, TDBData]{
dbTypeString: fmt.Sprintf("%T", *new(TDBData)),
modelTypeString: fmt.Sprintf("%T", *new(TModelData)),
todb: todb,
tomodel: tomodel,
}
}
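
Even though the converter wiring is still marked unfinished, the converters themselves can already be called directly. A minimal sketch using the predefined ConverterBoolToBit (not yet applied automatically by sq.Query/sq.InsertSingle, as the TODO above notes):

package main

import (
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/sq"
)

func main() {
    dbVal, err := sq.ConverterBoolToBit.ModelToDB(true) // bool -> int (bit)
    fmt.Println(dbVal, err)                             // 1 <nil>

    modelVal, err := sq.ConverterBoolToBit.DBToModel(0) // int (bit) -> bool
    fmt.Println(modelVal, err)                          // false <nil>

    _, err = sq.ConverterBoolToBit.DBToModel(7) // anything other than 0/1 is rejected
    fmt.Println(err)
}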

sq/hasher.go (new file, 199 lines)

@@ -0,0 +1,199 @@
package sq
import (
"context"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"fmt"
"github.com/jmoiron/sqlx"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"os"
"path/filepath"
"strings"
)
func HashSqliteSchema(ctx context.Context, schemaStr string) (string, error) {
dbdir := os.TempDir()
dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3")
err := os.MkdirAll(dbdir, os.ModePerm)
if err != nil {
return "", err
}
url := fmt.Sprintf("file:%s?_journal=%s&_timeout=%d&_fk=%s&_busy_timeout=%d", dbfile1, "DELETE", 1000, "true", 1000)
xdb, err := sqlx.Open("sqlite3", url)
if err != nil {
return "", err
}
db := NewDB(xdb)
_, err = db.Exec(ctx, schemaStr, PP{})
if err != nil {
return "", err
}
return HashSqliteDatabase(ctx, db)
}
func HashSqliteDatabase(ctx context.Context, db DB) (string, error) {
ss, err := CreateSqliteDatabaseSchemaString(ctx, db)
if err != nil {
return "", err
}
cs := sha256.Sum256([]byte(ss))
return hex.EncodeToString(cs[:]), nil
}
func CreateSqliteDatabaseSchemaString(ctx context.Context, db DB) (string, error) {
type colInfo struct {
Name string `db:"name"`
Type string `db:"type"`
NotNull string `db:"notnull"`
Default *string `db:"dflt_value"`
PrimaryKey *string `db:"pk"`
}
type idxInfo struct {
Name string `json:"name" db:"name"`
Unique int `json:"unique" db:"unique"`
Origin string `json:"origin" db:"origin"`
Partial int `json:"partial" db:"partial"`
}
type fkyInfo struct {
TableDest string `json:"table_dest" db:"table"`
From string `json:"from" db:"from"`
To string `json:"to" db:"to"`
OnUpdate string `json:"on_update" db:"on_update"`
OnDelete string `json:"on_delete" db:"on_delete"`
Match string `json:"match" db:"match"`
}
type tabInfo struct {
Name string `json:"name" db:"name"`
Type string `json:"type" db:"type"`
NumCol int `json:"ncol" db:"ncol"`
Strict int `json:"strict" db:"strict"`
ColumnInfo []colInfo `json:"-"`
IndexInfo []idxInfo `json:"-"`
FKeyInfo []fkyInfo `json:"-"`
}
rowsTableList, err := db.Query(ctx, "PRAGMA table_list;", PP{})
if err != nil {
return "", err
}
tableList, err := ScanAll[tabInfo](rowsTableList, SModeFast, Unsafe, true)
if err != nil {
return "", err
}
langext.SortBy(tableList, func(v tabInfo) string { return v.Name })
result := make([]tabInfo, 0)
for i, tab := range tableList {
if strings.HasPrefix(tab.Name, "sqlite_") {
continue
}
{
rowsColumnList, err := db.Query(ctx, fmt.Sprintf("PRAGMA table_info(\"%s\");", tab.Name), PP{})
if err != nil {
return "", err
}
columnList, err := ScanAll[colInfo](rowsColumnList, SModeFast, Unsafe, true)
if err != nil {
return "", err
}
langext.SortBy(columnList, func(v colInfo) string { return v.Name })
tableList[i].ColumnInfo = columnList
}
{
rowsIdxList, err := db.Query(ctx, fmt.Sprintf("PRAGMA index_list(\"%s\");", tab.Name), PP{})
if err != nil {
return "", err
}
idxList, err := ScanAll[idxInfo](rowsIdxList, SModeFast, Unsafe, true)
if err != nil {
return "", err
}
langext.SortBy(idxList, func(v idxInfo) string { return v.Name })
tableList[i].IndexInfo = idxList
}
{
rowsIdxList, err := db.Query(ctx, fmt.Sprintf("PRAGMA foreign_key_list(\"%s\");", tab.Name), PP{})
if err != nil {
return "", err
}
fkyList, err := ScanAll[fkyInfo](rowsIdxList, SModeFast, Unsafe, true)
if err != nil {
return "", err
}
langext.SortBy(fkyList, func(v fkyInfo) string { return v.From })
tableList[i].FKeyInfo = fkyList
}
result = append(result, tableList[i])
}
strBuilderResult := ""
for _, vTab := range result {
jbinTable, err := json.Marshal(vTab)
if err != nil {
return "", err
}
strBuilderResult += fmt.Sprintf("#TABLE: %s\n{\n", string(jbinTable))
for _, vCol := range vTab.ColumnInfo {
jbinColumn, err := json.Marshal(vCol)
if err != nil {
return "", err
}
strBuilderResult += fmt.Sprintf(" COLUMN: %s\n", string(jbinColumn))
}
for _, vIdx := range vTab.IndexInfo {
jbinIndex, err := json.Marshal(vIdx)
if err != nil {
return "", err
}
strBuilderResult += fmt.Sprintf(" INDEX: %s\n", string(jbinIndex))
}
for _, vFky := range vTab.FKeyInfo {
jbinFKey, err := json.Marshal(vFky)
if err != nil {
return "", err
}
strBuilderResult += fmt.Sprintf(" FKEY: %s\n", string(jbinFKey))
}
strBuilderResult += "}\n\n"
}
return strBuilderResult, nil
}
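
A hedged usage sketch of the schema hasher above; it assumes an sqlite3 driver is registered under the name "sqlite3" (for example github.com/mattn/go-sqlite3, which understands DSN parameters of the form used in HashSqliteSchema, such as _journal, _fk and _busy_timeout):

package main

import (
    "context"
    "fmt"

    _ "github.com/mattn/go-sqlite3" // registers the "sqlite3" driver

    "gogs.mikescher.com/BlackForestBytes/goext/sq"
)

func main() {
    schema := `CREATE TABLE users (
        id   TEXT NOT NULL PRIMARY KEY,
        name TEXT NOT NULL
    );`

    hash, err := sq.HashSqliteSchema(context.Background(), schema)
    if err != nil {
        panic(err)
    }
    fmt.Println(hash) // hex-encoded SHA-256 over the normalized table/column/index/fkey description
}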


@@ -1,9 +1,13 @@
package sq
import (
"context"
"database/sql" "database/sql"
"errors" "errors"
"fmt"
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
"reflect"
"strings"
)
type StructScanMode string
@@ -16,10 +20,79 @@ const (
type StructScanSafety string
const (
Safe StructScanSafety = "SAFE" // return error for missing fields
Unsafe StructScanSafety = "UNSAFE" // ignore missing fields
)
func InsertSingle[TData any](ctx context.Context, q Queryable, tableName string, v TData) (sql.Result, error) {
rval := reflect.ValueOf(v)
rtyp := rval.Type()
columns := make([]string, 0)
params := make([]string, 0)
pp := PP{}
for i := 0; i < rtyp.NumField(); i++ {
rsfield := rtyp.Field(i)
rvfield := rval.Field(i)
if !rsfield.IsExported() {
continue
}
columnName := rsfield.Tag.Get("db")
if columnName == "" || columnName == "-" {
continue
}
paramkey := fmt.Sprintf("_%s", columnName)
columns = append(columns, "\""+columnName+"\"")
params = append(params, ":"+paramkey)
pp[paramkey] = rvfield.Interface()
}
sqlstr := fmt.Sprintf("INSERT"+" INTO \"%s\" (%s) VALUES (%s)", tableName, strings.Join(columns, ", "), strings.Join(params, ", "))
sqlr, err := q.Exec(ctx, sqlstr, pp)
if err != nil {
return nil, err
}
return sqlr, nil
}
func QuerySingle[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) (TData, error) {
rows, err := q.Query(ctx, sql, pp)
if err != nil {
return *new(TData), err
}
data, err := ScanSingle[TData](rows, mode, sec, true)
if err != nil {
return *new(TData), err
}
return data, nil
}
func QueryAll[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) ([]TData, error) {
rows, err := q.Query(ctx, sql, pp)
if err != nil {
return nil, err
}
data, err := ScanAll[TData](rows, mode, sec, true)
if err != nil {
return nil, err
}
return data, nil
}
func ScanSingle[TData any](rows *sqlx.Rows, mode StructScanMode, sec StructScanSafety, close bool) (TData, error) {
if rows.Next() {
var strscan *StructScanner
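
To illustrate the new helpers, a hedged sketch of InsertSingle and QueryAll against a struct with db tags (the table name, column names and the PP literal are illustrative assumptions):

package example

import (
    "context"

    "gogs.mikescher.com/BlackForestBytes/goext/sq"
)

type User struct {
    ID   string `db:"id"`
    Name string `db:"name"`
    Age  int    `db:"age"`
}

func insertAndQuery(ctx context.Context, q sq.Queryable) ([]User, error) {
    // builds: INSERT INTO "users" ("id", "name", "age") VALUES (:_id, :_name, :_age)
    _, err := sq.InsertSingle(ctx, q, "users", User{ID: "u1", Name: "alice", Age: 42})
    if err != nil {
        return nil, err
    }

    // query + struct-scan in one call; Safe returns an error for fields missing in the result
    return sq.QueryAll[User](ctx, q, "SELECT * FROM users WHERE age >= :min_age", sq.PP{"min_age": 18}, sq.SModeFast, sq.Safe)
}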