Compare commits
17 Commits
1fbae343a4
31418bf0e6
6d45f6f667
f610a2202c
2807299d46
e872dbccec
9daf71e2ed
fe278f7772
8ebda6fb3a
b0d3ce8c1c
021465e524
cf9c73aa4a
0652bf22dc
b196adffc7
717065e62d
e7b2b040b2
05d0f9e469
Makefile (4 changed lines)
@@ -3,7 +3,9 @@ run:
	echo "This is a library - can't be run" && false

test:
-	go test ./...
+	# go test ./...
+	which gotestsum || go install gotest.tools/gotestsum@latest
+	gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./test"

version:
	_data/version.sh
bfcodegen/enum-generate.go (new file, 318 lines)
@@ -0,0 +1,318 @@
package bfcodegen

import (
	"errors"
	"fmt"
	"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"gogs.mikescher.com/BlackForestBytes/goext/rext"
	"io"
	"os"
	"regexp"
	"strings"
	"time"
)

type EnumDefVal struct {
	VarName     string
	Value       string
	Description *string
}

type EnumDef struct {
	File         string
	EnumTypeName string
	Type         string
	Values       []EnumDefVal
}

var rexPackage = rext.W(regexp.MustCompile("^package\\s+(?P<name>[A-Za-z0-9_]+)\\s*$"))

var rexEnumDef = rext.W(regexp.MustCompile("^\\s*type\\s+(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*//\\s*(@enum:type).*$"))

var rexValueDef = rext.W(regexp.MustCompile("^\\s*(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*=\\s*(?P<value>(\"[A-Za-z0-9_:]+\"|[0-9]+))\\s*(//(?P<descr>.*))?.*$"))

func GenerateEnumSpecs(sourceDir string, destFile string) error {

	files, err := os.ReadDir(sourceDir)
	if err != nil {
		return err
	}

	allEnums := make([]EnumDef, 0)

	pkgname := ""

	for _, f := range files {
		if !strings.HasSuffix(f.Name(), ".go") {
			continue
		}

		fmt.Printf("========= %s =========\n\n", f.Name())
		fileEnums, pn, err := processFile(f.Name())
		if err != nil {
			return err
		}

		fmt.Printf("\n")

		allEnums = append(allEnums, fileEnums...)

		if pn != "" {
			pkgname = pn
		}
	}

	if pkgname == "" {
		return errors.New("no package name found in any file")
	}

	err = os.WriteFile(destFile, []byte(fmtOutput(allEnums, pkgname)), 0o755)
	if err != nil {
		return err
	}

	res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second))
	if err != nil {
		return err
	}

	if res.CommandTimedOut {
		fmt.Println(res.StdCombined)
		return errors.New("go fmt timed out")
	}
	if res.ExitCode != 0 {
		fmt.Println(res.StdCombined)
		return errors.New("go fmt did not succeed")
	}

	return nil
}

func processFile(fn string) ([]EnumDef, string, error) {
	file, err := os.Open(fn)
	if err != nil {
		return nil, "", err
	}

	defer func() { _ = file.Close() }()

	bin, err := io.ReadAll(file)
	if err != nil {
		return nil, "", err
	}

	lines := strings.Split(string(bin), "\n")

	enums := make([]EnumDef, 0)

	pkgname := ""

	for i, line := range lines {
		if i == 0 && strings.HasPrefix(line, "// Code generated by") {
			break
		}

		if match, ok := rexPackage.MatchFirst(line); i == 0 && ok {
			pkgname = match.GroupByName("name").Value()
			continue
		}

		if match, ok := rexEnumDef.MatchFirst(line); ok {
			def := EnumDef{
				File:         fn,
				EnumTypeName: match.GroupByName("name").Value(),
				Type:         match.GroupByName("type").Value(),
				Values:       make([]EnumDefVal, 0),
			}
			enums = append(enums, def)
			fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type)
		}

		if match, ok := rexValueDef.MatchFirst(line); ok {
			typename := match.GroupByName("type").Value()
			def := EnumDefVal{
				VarName:     match.GroupByName("name").Value(),
				Value:       match.GroupByName("value").Value(),
				Description: match.GroupByNameOrEmpty("descr").ValueOrNil(),
			}

			found := false
			for i, v := range enums {
				if v.EnumTypeName == typename {
					enums[i].Values = append(enums[i].Values, def)
					found = true
					if def.Description != nil {
						fmt.Printf("Found enum value [%s] for '%s' ('%s')\n", def.Value, def.VarName, *def.Description)
					} else {
						fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName)
					}
					break
				}
			}
			if !found {
				fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName)
			}
		}
	}

	return enums, pkgname, nil
}

func fmtOutput(enums []EnumDef, pkgname string) string {
	str := "// Code generated by permissions_gen.sh DO NOT EDIT.\n"
	str += "\n"
	str += "package " + pkgname + "\n"
	str += "\n"

	str += "import \"gogs.mikescher.com/BlackForestBytes/goext/langext\"" + "\n"
	str += "\n"

	str += "type Enum interface {" + "\n"
	str += " Valid() bool" + "\n"
	str += " ValuesAny() []any" + "\n"
	str += " ValuesMeta() []EnumMetaValue" + "\n"
	str += " VarName() string" + "\n"
	str += "}" + "\n"
	str += "" + "\n"

	str += "type StringEnum interface {" + "\n"
	str += " Enum" + "\n"
	str += " String() string" + "\n"
	str += "}" + "\n"
	str += "" + "\n"

	str += "type DescriptionEnum interface {" + "\n"
	str += " Enum" + "\n"
	str += " Description() string" + "\n"
	str += "}" + "\n"
	str += "\n"

	str += "type EnumMetaValue struct {" + "\n"
	str += " VarName string `json:\"varName\"`" + "\n"
	str += " Value any `json:\"value\"`" + "\n"
	str += " Description *string `json:\"description\"`" + "\n"
	str += "}" + "\n"
	str += "\n"

	for _, enumdef := range enums {

		hasDescr := langext.ArrAll(enumdef.Values, func(val EnumDefVal) bool { return val.Description != nil })
		hasStr := enumdef.Type == "string"

		str += "// ================================ " + enumdef.EnumTypeName + " ================================" + "\n"
		str += "//" + "\n"
		str += "// File: " + enumdef.File + "\n"
		str += "// StringEnum: " + langext.Conditional(hasStr, "true", "false") + "\n"
		str += "// DescrEnum: " + langext.Conditional(hasDescr, "true", "false") + "\n"
		str += "//" + "\n"
		str += "" + "\n"

		str += "var __" + enumdef.EnumTypeName + "Values = []" + enumdef.EnumTypeName + "{" + "\n"
		for _, v := range enumdef.Values {
			str += " " + v.VarName + "," + "\n"
		}
		str += "}" + "\n"
		str += "" + "\n"

		if hasDescr {
			str += "var __" + enumdef.EnumTypeName + "Descriptions = map[" + enumdef.EnumTypeName + "]string{" + "\n"
			for _, v := range enumdef.Values {
				str += " " + v.VarName + ": \"" + strings.TrimSpace(*v.Description) + "\"," + "\n"
			}
			str += "}" + "\n"
			str += "" + "\n"
		}

		str += "var __" + enumdef.EnumTypeName + "Varnames = map[" + enumdef.EnumTypeName + "]string{" + "\n"
		for _, v := range enumdef.Values {
			str += " " + v.VarName + ": \"" + v.VarName + "\"," + "\n"
		}
		str += "}" + "\n"
		str += "" + "\n"

		str += "func (e " + enumdef.EnumTypeName + ") Valid() bool {" + "\n"
		str += " return langext.InArray(e, __" + enumdef.EnumTypeName + "Values)" + "\n"
		str += "}" + "\n"
		str += "" + "\n"

		str += "func (e " + enumdef.EnumTypeName + ") Values() []" + enumdef.EnumTypeName + " {" + "\n"
		str += " return __" + enumdef.EnumTypeName + "Values" + "\n"
		str += "}" + "\n"
		str += "" + "\n"

		str += "func (e " + enumdef.EnumTypeName + ") ValuesAny() []any {" + "\n"
		str += " return langext.ArrCastToAny(__" + enumdef.EnumTypeName + "Values)" + "\n"
		str += "}" + "\n"
		str += "" + "\n"

		str += "func (e " + enumdef.EnumTypeName + ") ValuesMeta() []EnumMetaValue {" + "\n"
		str += " return []EnumMetaValue{" + "\n"
		for _, v := range enumdef.Values {
			if hasDescr {
				str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: langext.Ptr(\"%s\")},", v.VarName, v.VarName, strings.TrimSpace(*v.Description)) + "\n"
			} else {
				str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: nil},", v.VarName, v.VarName) + "\n"
			}
		}
		str += " }" + "\n"
		str += "}" + "\n"
		str += "" + "\n"

		if hasStr {
			str += "func (e " + enumdef.EnumTypeName + ") String() string {" + "\n"
			str += " return string(e)" + "\n"
			str += "}" + "\n"
			str += "" + "\n"
		}

		if hasDescr {
			str += "func (e " + enumdef.EnumTypeName + ") Description() string {" + "\n"
			str += " if d, ok := __" + enumdef.EnumTypeName + "Descriptions[e]; ok {" + "\n"
			str += " return d" + "\n"
			str += " }" + "\n"
			str += " return \"\"" + "\n"
			str += "}" + "\n"
			str += "" + "\n"
		}

		str += "func (e " + enumdef.EnumTypeName + ") VarName() string {" + "\n"
		str += " if d, ok := __" + enumdef.EnumTypeName + "Varnames[e]; ok {" + "\n"
		str += " return d" + "\n"
		str += " }" + "\n"
		str += " return \"\"" + "\n"
		str += "}" + "\n"
		str += "" + "\n"

		str += "func Parse" + enumdef.EnumTypeName + "(vv string) (" + enumdef.EnumTypeName + ", bool) {" + "\n"
		str += " for _, ev := range __" + enumdef.EnumTypeName + "Values {" + "\n"
		str += " if string(ev) == vv {" + "\n"
		str += " return ev, true" + "\n"
		str += " }" + "\n"
		str += " }" + "\n"
		str += " return \"\", false" + "\n"
		str += "}" + "\n"
		str += "" + "\n"

		str += "func " + enumdef.EnumTypeName + "Values() []" + enumdef.EnumTypeName + " {" + "\n"
		str += " return __" + enumdef.EnumTypeName + "Values" + "\n"
		str += "}" + "\n"
		str += "" + "\n"

		str += "func " + enumdef.EnumTypeName + "ValuesMeta() []EnumMetaValue {" + "\n"
		str += " return []EnumMetaValue{" + "\n"
		for _, v := range enumdef.Values {
			if hasDescr {
				str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: langext.Ptr(\"%s\")},", v.VarName, v.VarName, strings.TrimSpace(*v.Description)) + "\n"
			} else {
				str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: nil},", v.VarName, v.VarName) + "\n"
			}
		}
		str += " }" + "\n"
		str += "}" + "\n"
		str += "" + "\n"

	}

	return str
}
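For orientation, here is a minimal sketch of the kind of annotated source file the regexes above are written to pick up; the package, type, and value names are hypothetical and only illustrate the `@enum:type` marker and the optional `// description` comments:

```go
package models // hypothetical package that GenerateEnumSpecs would scan

// PermissionLevel matches rexEnumDef because of the @enum:type marker
type PermissionLevel string // @enum:type

const (
	PermissionUser  PermissionLevel = "USER"  // a normal user
	PermissionAdmin PermissionLevel = "ADMIN" // an administrator
)
```

Running something like `GenerateEnumSpecs("models/", "models/enums_gen.go")` (e.g. from a go:generate directive) would then emit the Valid/ValuesMeta/ParsePermissionLevel helpers produced by fmtOutput above for that type.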
@@ -8,6 +8,7 @@ import (
	"os"
	"reflect"
	"strconv"
	"strings"
	"time"
)

@@ -172,6 +173,20 @@ func parseEnvToValue(envval string, fullEnvKey string, rvtype reflect.Type) (ref
		return envcvl, nil

	} else if rvtype.ConvertibleTo(reflect.TypeOf(false)) {

		if strings.TrimSpace(strings.ToLower(envval)) == "true" {
			return reflect.ValueOf(true).Convert(rvtype), nil
		} else if strings.TrimSpace(strings.ToLower(envval)) == "false" {
			return reflect.ValueOf(false).Convert(rvtype), nil
		} else if strings.TrimSpace(strings.ToLower(envval)) == "1" {
			return reflect.ValueOf(true).Convert(rvtype), nil
		} else if strings.TrimSpace(strings.ToLower(envval)) == "0" {
			return reflect.ValueOf(false).Convert(rvtype), nil
		} else {
			return reflect.Value{}, errors.New(fmt.Sprintf("Failed to parse env-config variable '%s' to <%s, bool> (value := '%s')", rvtype.Name(), fullEnvKey, envval))
		}

	} else if rvtype.ConvertibleTo(reflect.TypeOf("")) {

		envcvl := reflect.ValueOf(envval).Convert(rvtype)
@@ -68,6 +68,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
		V7 aliasstring `env:"TEST_V7"`
		V8 time.Duration `env:"TEST_V8"`
		V9 time.Time `env:"TEST_V9"`
		VA bool `env:"TEST_VA"`
	}

	data := testdata{
@@ -82,6 +83,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
		V7: "7",
		V8: 9,
		V9: time.Unix(1671102873, 0),
		VA: false,
	}

	t.Setenv("TEST_V1", "846")
@@ -93,6 +95,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
	t.Setenv("TEST_V7", "AAAAAA")
	t.Setenv("TEST_V8", "1min4s")
	t.Setenv("TEST_V9", "2009-11-10T23:00:00Z")
	t.Setenv("TEST_VA", "true")

	err := ApplyEnvOverrides("", &data, ".")
	if err != nil {
@@ -109,6 +112,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
	tst.AssertEqual(t, data.V7, "AAAAAA")
	tst.AssertEqual(t, data.V8, time.Second*64)
	tst.AssertEqual(t, data.V9, time.Unix(1257894000, 0).UTC())
	tst.AssertEqual(t, data.VA, true)
}

func TestApplyEnvOverridesRecursive(t *testing.T) {
@@ -433,3 +433,10 @@ func ArrConcat[T any](arr ...[]T) []T {
	}
	return r
}

// ArrCopy does a shallow copy of the 'in' array
func ArrCopy[T any](in []T) []T {
	out := make([]T, len(in))
	copy(out, in)
	return out
}
@@ -31,16 +31,16 @@ func CompareIntArr(arr1 []int, arr2 []int) bool {
	return false
}

-func CompareArr[T OrderedConstraint](arr1 []T, arr2 []T) bool {
+func CompareArr[T OrderedConstraint](arr1 []T, arr2 []T) int {

	for i := 0; i < len(arr1) || i < len(arr2); i++ {

		if i < len(arr1) && i < len(arr2) {

			if arr1[i] < arr2[i] {
-				return true
+				return -1
			} else if arr1[i] > arr2[i] {
-				return false
+				return +1
			} else {
				continue
			}
@@ -49,15 +49,55 @@ func CompareArr[T OrderedConstraint](arr1 []T, arr2 []T) bool {

		if i < len(arr1) {

-			return true
+			return +1

		} else { // if i < len(arr2)

-			return false
+			return -1

		}

	}

-	return false
+	return 0
}

func CompareString(a, b string) int {
	if a == b {
		return 0
	}
	if a < b {
		return -1
	}
	return +1
}

func CompareInt(a, b int) int {
	if a == b {
		return 0
	}
	if a < b {
		return -1
	}
	return +1
}

func CompareInt64(a, b int64) int {
	if a == b {
		return 0
	}
	if a < b {
		return -1
	}
	return +1
}

func Compare[T OrderedConstraint](a, b T) int {
	if a == b {
		return 0
	}
	if a < b {
		return -1
	}
	return +1
}
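A small sketch of what the int-returning CompareArr enables; it assumes langext.OrderedConstraint is satisfied by int, as the surrounding code suggests:

```go
package main

import (
	"fmt"
	"sort"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
	vals := [][]int{{2, 1}, {1, 9}, {1, 2}}

	// with an int result (<0 / 0 / >0) the comparator can drive sort.Slice directly,
	// which the old bool version could not express (it had no "equal" outcome)
	sort.Slice(vals, func(i, j int) bool {
		return langext.CompareArr(vals[i], vals[j]) < 0
	})

	fmt.Println(vals) // [[1 2] [1 9] [2 1]]
}
```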
@@ -1,5 +1,10 @@
package langext

type MapEntry[T comparable, V any] struct {
	Key   T
	Value V
}

func MapKeyArr[T comparable, V any](v map[T]V) []T {
	result := make([]T, 0, len(v))
	for k := range v {
@@ -8,6 +13,14 @@ func MapKeyArr[T comparable, V any](v map[T]V) []T {
	return result
}

func MapValueArr[T comparable, V any](v map[T]V) []V {
	result := make([]V, 0, len(v))
	for _, mv := range v {
		result = append(result, mv)
	}
	return result
}

func ArrToMap[T comparable, V any](a []V, keyfunc func(V) T) map[T]V {
	result := make(map[T]V, len(a))
	for _, v := range a {
@@ -16,6 +29,17 @@ func ArrToMap[T comparable, V any](a []V, keyfunc func(V) T) map[T]V {
	return result
}

func MapToArr[T comparable, V any](v map[T]V) []MapEntry[T, V] {
	result := make([]MapEntry[T, V], 0, len(v))
	for mk, mv := range v {
		result = append(result, MapEntry[T, V]{
			Key:   mk,
			Value: mv,
		})
	}
	return result
}

func CopyMap[K comparable, V any](a map[K]V) map[K]V {
	result := make(map[K]V, len(a))
	for k, v := range a {
@@ -23,3 +47,11 @@ func CopyMap[K comparable, V any](a map[K]V) map[K]V {
	}
	return result
}

func ForceMap[K comparable, V any](v map[K]V) map[K]V {
	if v == nil {
		return make(map[K]V, 0)
	} else {
		return v
	}
}
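A short, hypothetical usage sketch of the new map helpers (MapValueArr, MapToArr, ForceMap), assuming they are imported from this module's langext package:

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
	m := map[string]int{"a": 1, "b": 2}

	vals := langext.MapValueArr(m)  // the values, in map-iteration (i.e. random) order
	entries := langext.MapToArr(m)  // []MapEntry[string, int], also in random order

	var nilMap map[string]int
	safe := langext.ForceMap(nilMap) // never nil, e.g. so JSON encoding yields {} instead of null

	fmt.Println(len(vals), len(entries), safe == nil) // 2 2 false
}
```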
@@ -4,6 +4,12 @@ import (
	"reflect"
)

// PTrue := &true
var PTrue = Ptr(true)

// PFalse := &false
var PFalse = Ptr(false)

func Ptr[T any](v T) *T {
	return &v
}
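PTrue and PFalse are small conveniences for APIs that take *bool (optional booleans). A hedged sketch with a hypothetical struct:

```go
package main

import (
	"encoding/json"
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

// UpdatePatch is a hypothetical struct where a nil field means "leave unchanged"
type UpdatePatch struct {
	Enabled *bool `json:"enabled,omitempty"`
}

func main() {
	// instead of: v := true; patch.Enabled = &v
	patch := UpdatePatch{Enabled: langext.PTrue}

	out, _ := json.Marshal(patch)
	fmt.Println(string(out)) // {"enabled":true}
}
```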
langext/reflection.go (new file, 111 lines)
@@ -0,0 +1,111 @@
package langext

import (
	"reflect"
)

var reflectBasicTypes = []reflect.Type{
	reflect.Bool:       reflect.TypeOf(false),
	reflect.Int:        reflect.TypeOf(int(0)),
	reflect.Int8:       reflect.TypeOf(int8(0)),
	reflect.Int16:      reflect.TypeOf(int16(0)),
	reflect.Int32:      reflect.TypeOf(int32(0)),
	reflect.Int64:      reflect.TypeOf(int64(0)),
	reflect.Uint:       reflect.TypeOf(uint(0)),
	reflect.Uint8:      reflect.TypeOf(uint8(0)),
	reflect.Uint16:     reflect.TypeOf(uint16(0)),
	reflect.Uint32:     reflect.TypeOf(uint32(0)),
	reflect.Uint64:     reflect.TypeOf(uint64(0)),
	reflect.Uintptr:    reflect.TypeOf(uintptr(0)),
	reflect.Float32:    reflect.TypeOf(float32(0)),
	reflect.Float64:    reflect.TypeOf(float64(0)),
	reflect.Complex64:  reflect.TypeOf(complex64(0)),
	reflect.Complex128: reflect.TypeOf(complex128(0)),
	reflect.String:     reflect.TypeOf(""),
}

// Underlying returns the underlying type of t (without type alias)
//
// https://github.com/golang/go/issues/39574#issuecomment-655664772
func Underlying(t reflect.Type) (ret reflect.Type) {
	if t.Name() == "" {
		// t is an unnamed type. the underlying type is t itself
		return t
	}
	kind := t.Kind()
	if ret = reflectBasicTypes[kind]; ret != nil {
		return ret
	}
	switch kind {
	case reflect.Array:
		ret = reflect.ArrayOf(t.Len(), t.Elem())
	case reflect.Chan:
		ret = reflect.ChanOf(t.ChanDir(), t.Elem())
	case reflect.Map:
		ret = reflect.MapOf(t.Key(), t.Elem())
	case reflect.Func:
		nIn := t.NumIn()
		nOut := t.NumOut()
		in := make([]reflect.Type, nIn)
		out := make([]reflect.Type, nOut)
		for i := 0; i < nIn; i++ {
			in[i] = t.In(i)
		}
		for i := 0; i < nOut; i++ {
			out[i] = t.Out(i)
		}
		ret = reflect.FuncOf(in, out, t.IsVariadic())
	case reflect.Interface:
		// not supported
	case reflect.Ptr:
		ret = reflect.PtrTo(t.Elem())
	case reflect.Slice:
		ret = reflect.SliceOf(t.Elem())
	case reflect.Struct:
		// only partially supported: embedded fields
		// and unexported fields may cause panic in reflect.StructOf()
		defer func() {
			// if a panic happens, return t unmodified
			if recover() != nil && ret == nil {
				ret = t
			}
		}()
		n := t.NumField()
		fields := make([]reflect.StructField, n)
		for i := 0; i < n; i++ {
			fields[i] = t.Field(i)
		}
		ret = reflect.StructOf(fields)
	}
	return ret
}

// TryCast works similarly to `v2, ok := v.(T)`
// Except it works through type aliases
func TryCast[T any](v any) (T, bool) {

	underlying := Underlying(reflect.TypeOf(v))

	def := *new(T)

	if underlying != Underlying(reflect.TypeOf(def)) {
		return def, false
	}

	r1 := reflect.ValueOf(v)

	if !r1.CanConvert(underlying) {
		return def, false
	}

	r2 := r1.Convert(underlying)

	r3 := r2.Interface()

	r4, ok := r3.(T)
	if !ok {
		return def, false
	}

	return r4, true
}
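A runnable sketch of what TryCast adds over a plain type assertion (the UserID type is hypothetical):

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

// UserID is a named type whose underlying type is string
type UserID string

func main() {
	var v any = UserID("u-123")

	_, ok1 := v.(string)                 // false: a plain assertion does not see through the named type
	s, ok2 := langext.TryCast[string](v) // "u-123", true: TryCast converts via the underlying type

	fmt.Println(ok1, ok2, s)
}
```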
@@ -41,6 +41,14 @@ func NewHexUUID() (string, error) {
	return string(dst), nil
}

func MustHexUUID() string {
	v, err := NewHexUUID()
	if err != nil {
		panic(err)
	}
	return v
}

func NewUpperHexUUID() (string, error) {
	uuid, err := NewUUID()
	if err != nil {
@@ -64,6 +72,14 @@ func NewUpperHexUUID() (string, error) {
	return strings.ToUpper(string(dst)), nil
}

func MustUpperHexUUID() string {
	v, err := NewUpperHexUUID()
	if err != nil {
		panic(err)
	}
	return v
}

func NewRawHexUUID() (string, error) {
	uuid, err := NewUUID()
	if err != nil {
@@ -83,6 +99,14 @@ func NewRawHexUUID() (string, error) {
	return strings.ToUpper(string(dst)), nil
}

func MustRawHexUUID() string {
	v, err := NewRawHexUUID()
	if err != nil {
		panic(err)
	}
	return v
}

func NewBracesUUID() (string, error) {
	uuid, err := NewUUID()
	if err != nil {
@@ -108,6 +132,14 @@ func NewBracesUUID() (string, error) {
	return strings.ToUpper(string(dst)), nil
}

func MustBracesUUID() string {
	v, err := NewBracesUUID()
	if err != nil {
		panic(err)
	}
	return v
}

func NewParensUUID() (string, error) {
	uuid, err := NewUUID()
	if err != nil {
@@ -132,3 +164,11 @@ func NewParensUUID() (string, error) {

	return strings.ToUpper(string(dst)), nil
}

func MustParensUUID() string {
	v, err := NewParensUUID()
	if err != nil {
		panic(err)
	}
	return v
}
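The Must* variants follow the common Go convention of panicking instead of returning an error, which makes them usable where no error can be propagated, e.g. package-level variables. A hypothetical usage sketch (the langext import is implied by the call to langext.MustHexUUID in sq/hasher.go below):

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

// panics at startup if UUID generation fails, instead of forcing an error return
var instanceID = langext.MustHexUUID()

func main() {
	fmt.Println(instanceID) // prints a freshly generated UUID string
}
```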
@@ -22,6 +22,31 @@ func Max[T langext.OrderedConstraint](v1 T, v2 T) T {
	}
}

func Max3[T langext.OrderedConstraint](v1 T, v2 T, v3 T) T {
	result := v1
	if v2 > result {
		result = v2
	}
	if v3 > result {
		result = v3
	}
	return result
}

func Max4[T langext.OrderedConstraint](v1 T, v2 T, v3 T, v4 T) T {
	result := v1
	if v2 > result {
		result = v2
	}
	if v3 > result {
		result = v3
	}
	if v4 > result {
		result = v4
	}
	return result
}

func Min[T langext.OrderedConstraint](v1 T, v2 T) T {
	if v1 < v2 {
		return v1
@@ -30,6 +55,31 @@ func Min[T langext.OrderedConstraint](v1 T, v2 T) T {
	}
}

func Min3[T langext.OrderedConstraint](v1 T, v2 T, v3 T) T {
	result := v1
	if v2 < result {
		result = v2
	}
	if v3 < result {
		result = v3
	}
	return result
}

func Min4[T langext.OrderedConstraint](v1 T, v2 T, v3 T, v4 T) T {
	result := v1
	if v2 < result {
		result = v2
	}
	if v3 < result {
		result = v3
	}
	if v4 < result {
		result = v4
	}
	return result
}

func Abs[T langext.NumberConstraint](v T) T {
	if v < 0 {
		return -v
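A tiny usage sketch; the mathext package name and import path are assumptions (the file is not named in this extract, but the langext-qualified constraints imply it is not langext itself):

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/mathext"
)

func main() {
	fmt.Println(mathext.Max3(3, 9, 5))     // 9
	fmt.Println(mathext.Min4(3, 9, 5, -2)) // -2
	fmt.Println(mathext.Abs(-7))           // 7
}
```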
mongoext/pipeline.go (new file, 49 lines)
@@ -0,0 +1,49 @@
package mongoext

import (
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
)

// FixTextSearchPipeline moves {$match:{$text:{$search}}} entries to the front of the pipeline (otherwise it's a mongo error)
func FixTextSearchPipeline(pipeline mongo.Pipeline) mongo.Pipeline {

	dget := func(v bson.D, k string) (bson.M, bool) {
		for _, e := range v {
			if e.Key == k {
				if mv, ok := e.Value.(bson.M); ok {
					return mv, true
				}
			}
		}
		return nil, false
	}
	mget := func(v bson.M, k string) (bson.M, bool) {
		for ekey, eval := range v {
			if ekey == k {
				if mv, ok := eval.(bson.M); ok {
					return mv, true
				}
			}
		}
		return nil, false
	}

	result := make([]bson.D, 0, len(pipeline))

	for _, entry := range pipeline {

		if v0, ok := dget(entry, "$match"); ok {
			if v1, ok := mget(v0, "$text"); ok {
				if _, ok := v1["$search"]; ok {
					result = append([]bson.D{entry}, result...)
					continue
				}
			}
		}

		result = append(result, entry)
	}

	return result
}
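A sketch of the intended use: build the pipeline in whatever order is convenient, then let FixTextSearchPipeline hoist the $text match to position 0 before running the aggregation (collection setup is omitted; the mongoext import path is assumed from the module layout):

```go
package main

import (
	"fmt"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"

	"gogs.mikescher.com/BlackForestBytes/goext/mongoext"
)

func main() {
	pipeline := mongo.Pipeline{
		{{Key: "$sort", Value: bson.M{"created": -1}}},
		{{Key: "$match", Value: bson.M{"$text": bson.M{"$search": "hello world"}}}},
	}

	// the $text stage is moved to the front; all other stages keep their relative order
	pipeline = mongoext.FixTextSearchPipeline(pipeline)

	fmt.Println(pipeline[0]) // the $match/$text stage
}
```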
mongoext/projections.go (new file, 30 lines)
@@ -0,0 +1,30 @@
package mongoext

import (
	"go.mongodb.org/mongo-driver/bson"
	"reflect"
	"strings"
)

// ProjectionFromStruct automatically generates a mongodb projection for a struct
// This way you can pretty much always write
// `options.FindOne().SetProjection(mongoutils.ProjectionFromStruct(...your_model...))`
// to only get the data from mongodb that you will actually use in the later decode step
func ProjectionFromStruct(obj interface{}) bson.M {
	v := reflect.ValueOf(obj)
	t := v.Type()

	result := bson.M{}

	for i := 0; i < v.NumField(); i++ {
		tag := t.Field(i).Tag.Get("bson")
		if tag == "" {
			continue
		}
		tag = strings.Split(tag, ",")[0]

		result[tag] = 1
	}

	return result
}
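A sketch of the pattern the comment describes, with a hypothetical model struct:

```go
package main

import (
	"fmt"

	"go.mongodb.org/mongo-driver/mongo/options"

	"gogs.mikescher.com/BlackForestBytes/goext/mongoext"
)

// User is a hypothetical model; only fields with a bson tag end up in the projection
type User struct {
	ID      string `bson:"_id"`
	Name    string `bson:"name,omitempty"`
	Ignored string // no bson tag -> not part of the projection
}

func main() {
	proj := mongoext.ProjectionFromStruct(User{}) // bson.M{"_id": 1, "name": 1}
	opts := options.FindOne().SetProjection(proj)

	fmt.Println(proj, opts != nil)
}
```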
@@ -71,6 +71,7 @@ func (t *RFC3339Time) UnmarshalText(data []byte) error {
func (t *RFC3339Time) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
	if bt == bsontype.Null {
		// we can't set nil in UnmarshalBSONValue (so we use default(struct))
		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values
		// https://stackoverflow.com/questions/75167597
		// https://jira.mongodb.org/browse/GODRIVER-2252
		*t = RFC3339Time{}
@@ -80,7 +81,7 @@ func (t *RFC3339Time) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
		return errors.New(fmt.Sprintf("cannot unmarshal %v into RFC3339Time", bt))
	}
	var tt time.Time
-	err := bson.Unmarshal(data, &tt)
+	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt)
	if err != nil {
		return err
	}
@@ -115,6 +116,12 @@ func (t RFC3339Time) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueRead
		return err
	}

	if val.Kind() == reflect.Ptr {
		val.Set(reflect.ValueOf(&t))
	} else {
		val.Set(reflect.ValueOf(t))
	}

	return nil
}
@@ -71,6 +71,7 @@ func (t *RFC3339NanoTime) UnmarshalText(data []byte) error {
func (t *RFC3339NanoTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
	if bt == bsontype.Null {
		// we can't set nil in UnmarshalBSONValue (so we use default(struct))
		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values
		// https://stackoverflow.com/questions/75167597
		// https://jira.mongodb.org/browse/GODRIVER-2252
		*t = RFC3339NanoTime{}
@@ -115,7 +116,11 @@ func (t RFC3339NanoTime) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.Value
		return err
	}

	if val.Kind() == reflect.Ptr {
		val.Set(reflect.ValueOf(&t))
	} else {
		val.Set(reflect.ValueOf(t))
	}

	return nil
}
sq/converter.go (new file, 91 lines)
@@ -0,0 +1,91 @@
package sq

import (
	"errors"
	"fmt"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"time"
)

//TODO UNFINISHED
// this is not finished
// idea was that we can register converter in the database struct
// they get inherited from the transactions
// and when unmarshaling (sq.Query | sq.QueryAll)
// or marshaling (sq.InsertSingle)
// the types get converted automatically...

type DBTypeConverter interface {
	ModelTypeString() string
	DBTypeString() string
	ModelToDB(v any) (any, error)
	DBToModel(v any) (any, error)
}

var ConverterBoolToBit = NewDBTypeConverter[bool, int](func(v bool) (int, error) {
	return langext.Conditional(v, 1, 0), nil
}, func(v int) (bool, error) {
	if v == 0 {
		return false, nil
	}
	if v == 1 {
		return true, nil
	}
	return false, errors.New(fmt.Sprintf("invalid value for boolean: '%d'", v))
})

var ConverterTimeToUnixMillis = NewDBTypeConverter[time.Time, int64](func(v time.Time) (int64, error) {
	return v.UnixMilli(), nil
}, func(v int64) (time.Time, error) {
	return time.UnixMilli(v), nil
})

var ConverterOptTimeToUnixMillis = NewDBTypeConverter[*time.Time, *int64](func(v *time.Time) (*int64, error) {
	if v == nil {
		return nil, nil
	}
	return langext.Ptr(v.UnixMilli()), nil
}, func(v *int64) (*time.Time, error) {
	if v == nil {
		return nil, nil
	}
	return langext.Ptr(time.UnixMilli(*v)), nil
})

type dbTypeConverterImpl[TModelData any, TDBData any] struct {
	dbTypeString    string
	modelTypeString string
	todb            func(v TModelData) (TDBData, error)
	tomodel         func(v TDBData) (TModelData, error)
}

func (t *dbTypeConverterImpl[TModelData, TDBData]) ModelTypeString() string {
	return t.modelTypeString
}

func (t *dbTypeConverterImpl[TModelData, TDBData]) DBTypeString() string {
	return t.dbTypeString
}

func (t *dbTypeConverterImpl[TModelData, TDBData]) ModelToDB(v any) (any, error) {
	if vv, ok := v.(TModelData); ok {
		return t.todb(vv)
	}
	return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.modelTypeString, v))
}

func (t *dbTypeConverterImpl[TModelData, TDBData]) DBToModel(v any) (any, error) {
	if vv, ok := v.(TDBData); ok {
		return t.tomodel(vv)
	}
	return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.dbTypeString, v))
}

func NewDBTypeConverter[TModelData any, TDBData any](todb func(v TModelData) (TDBData, error), tomodel func(v TDBData) (TModelData, error)) DBTypeConverter {
	return &dbTypeConverterImpl[TModelData, TDBData]{
		dbTypeString:    fmt.Sprintf("%T", *new(TDBData)),
		modelTypeString: fmt.Sprintf("%T", *new(TModelData)),
		todb:            todb,
		tomodel:         tomodel,
	}
}
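Even though the converters are not yet wired into Query/Insert (the TODO above), they can already be used stand-alone. A small sketch; the sq import path is assumed from the module layout:

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/sq"
)

func main() {
	dbval, _ := sq.ConverterBoolToBit.ModelToDB(true) // 1
	modelval, _ := sq.ConverterBoolToBit.DBToModel(1) // true
	_, err := sq.ConverterBoolToBit.DBToModel(7)      // error: invalid value for boolean

	fmt.Println(dbval, modelval, err != nil)
}
```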
sq/hasher.go (new file, 199 lines)
@@ -0,0 +1,199 @@
package sq

import (
	"context"
	"crypto/sha256"
	"encoding/hex"
	"encoding/json"
	"fmt"
	"github.com/jmoiron/sqlx"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"os"
	"path/filepath"
	"strings"
)

func HashSqliteSchema(ctx context.Context, schemaStr string) (string, error) {
	dbdir := os.TempDir()
	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3")

	err := os.MkdirAll(dbdir, os.ModePerm)
	if err != nil {
		return "", err
	}

	url := fmt.Sprintf("file:%s?_journal=%s&_timeout=%d&_fk=%s&_busy_timeout=%d", dbfile1, "DELETE", 1000, "true", 1000)

	xdb, err := sqlx.Open("sqlite3", url)
	if err != nil {
		return "", err
	}

	db := NewDB(xdb)

	_, err = db.Exec(ctx, schemaStr, PP{})
	if err != nil {
		return "", err
	}

	return HashSqliteDatabase(ctx, db)
}

func HashSqliteDatabase(ctx context.Context, db DB) (string, error) {
	ss, err := CreateSqliteDatabaseSchemaString(ctx, db)
	if err != nil {
		return "", err
	}

	cs := sha256.Sum256([]byte(ss))

	return hex.EncodeToString(cs[:]), nil
}

func CreateSqliteDatabaseSchemaString(ctx context.Context, db DB) (string, error) {

	type colInfo struct {
		Name       string  `db:"name"`
		Type       string  `db:"type"`
		NotNull    string  `db:"notnull"`
		Default    *string `db:"dflt_value"`
		PrimaryKey *string `db:"pk"`
	}

	type idxInfo struct {
		Name   string `json:"name" db:"name"`
		Unique int    `json:"unique" db:"unique"`
		Origin string `json:"origin" db:"origin"`
		Patial int    `json:"partial" db:"partial"`
	}

	type fkyInfo struct {
		TableDest string `json:"table_dest" db:"table"`
		From      string `json:"from" db:"from"`
		To        string `json:"to" db:"to"`
		OnUpdate  string `json:"on_update" db:"on_update"`
		OnDelete  string `json:"on_delete" db:"on_delete"`
		Match     string `json:"match" db:"match"`
	}

	type tabInfo struct {
		Name   string `json:"name" db:"name"`
		Type   string `json:"type" db:"type"`
		NumCol int    `json:"ncol" db:"ncol"`
		Strict int    `json:"strict" db:"strict"`

		ColumnInfo []colInfo `json:"-"`
		IndexInfo  []idxInfo `json:"-"`
		FKeyInfo   []fkyInfo `json:"-"`
	}

	rowsTableList, err := db.Query(ctx, "PRAGMA table_list;", PP{})
	if err != nil {
		return "", err
	}
	tableList, err := ScanAll[tabInfo](rowsTableList, SModeFast, Unsafe, true)
	if err != nil {
		return "", err
	}

	langext.SortBy(tableList, func(v tabInfo) string { return v.Name })

	result := make([]tabInfo, 0)

	for i, tab := range tableList {

		if strings.HasPrefix(tab.Name, "sqlite_") {
			continue
		}

		{

			rowsColumnList, err := db.Query(ctx, fmt.Sprintf("PRAGMA table_info(\"%s\");", tab.Name), PP{})
			if err != nil {
				return "", err
			}

			columnList, err := ScanAll[colInfo](rowsColumnList, SModeFast, Unsafe, true)
			if err != nil {
				return "", err
			}

			langext.SortBy(columnList, func(v colInfo) string { return v.Name })

			tableList[i].ColumnInfo = columnList
		}

		{
			rowsIdxList, err := db.Query(ctx, fmt.Sprintf("PRAGMA index_list(\"%s\");", tab.Name), PP{})
			if err != nil {
				return "", err
			}
			idxList, err := ScanAll[idxInfo](rowsIdxList, SModeFast, Unsafe, true)
			if err != nil {
				return "", err
			}

			langext.SortBy(idxList, func(v idxInfo) string { return v.Name })

			tableList[i].IndexInfo = idxList
		}

		{
			rowsIdxList, err := db.Query(ctx, fmt.Sprintf("PRAGMA foreign_key_list(\"%s\");", tab.Name), PP{})
			if err != nil {
				return "", err
			}
			fkyList, err := ScanAll[fkyInfo](rowsIdxList, SModeFast, Unsafe, true)
			if err != nil {
				return "", err
			}

			langext.SortBy(fkyList, func(v fkyInfo) string { return v.From })

			tableList[i].FKeyInfo = fkyList
		}

		result = append(result, tableList[i])
	}

	strBuilderResult := ""
	for _, vTab := range result {
		jbinTable, err := json.Marshal(vTab)
		if err != nil {
			return "", err
		}

		strBuilderResult += fmt.Sprintf("#TABLE: %s\n{\n", string(jbinTable))

		for _, vCol := range vTab.ColumnInfo {
			jbinColumn, err := json.Marshal(vCol)
			if err != nil {
				return "", err
			}

			strBuilderResult += fmt.Sprintf(" COLUMN: %s\n", string(jbinColumn))
		}

		for _, vIdx := range vTab.IndexInfo {
			jbinIndex, err := json.Marshal(vIdx)
			if err != nil {
				return "", err
			}

			strBuilderResult += fmt.Sprintf(" INDEX: %s\n", string(jbinIndex))
		}

		for _, vFky := range vTab.FKeyInfo {
			jbinFKey, err := json.Marshal(vFky)
			if err != nil {
				return "", err
			}

			strBuilderResult += fmt.Sprintf(" FKEY: %s\n", string(jbinFKey))
		}

		strBuilderResult += "}\n\n"
	}

	return strBuilderResult, nil
}
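A usage sketch for schema hashing, e.g. to verify at startup that migrations produced the expected schema. It assumes an SQLite driver registered under the name "sqlite3" (such as mattn/go-sqlite3) is linked in, since HashSqliteSchema opens a throwaway on-disk database:

```go
package main

import (
	"context"
	"fmt"

	_ "github.com/mattn/go-sqlite3" // assumption: a driver that registers as "sqlite3"

	"gogs.mikescher.com/BlackForestBytes/goext/sq"
)

func main() {
	schema := `CREATE TABLE users (
		id   TEXT NOT NULL PRIMARY KEY,
		name TEXT NOT NULL
	);`

	hash, err := sq.HashSqliteSchema(context.Background(), schema)
	if err != nil {
		panic(err)
	}

	// the hash depends only on tables/columns/indexes/foreign-keys, not on row data
	fmt.Println(hash)
}
```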
@@ -1,9 +1,13 @@
package sq

import (
	"context"
	"database/sql"
	"errors"
	"fmt"
	"github.com/jmoiron/sqlx"
	"reflect"
	"strings"
)

type StructScanMode string

@@ -16,10 +20,79 @@ const (

type StructScanSafety string

const (
-	Safe   StructScanSafety = "SAFE"
-	Unsafe StructScanSafety = "UNSAFE"
+	Safe   StructScanSafety = "SAFE"   // return error for missing fields
+	Unsafe StructScanSafety = "UNSAFE" // ignore missing fields
)

func InsertSingle[TData any](ctx context.Context, q Queryable, tableName string, v TData) (sql.Result, error) {

	rval := reflect.ValueOf(v)
	rtyp := rval.Type()

	columns := make([]string, 0)
	params := make([]string, 0)
	pp := PP{}

	for i := 0; i < rtyp.NumField(); i++ {

		rsfield := rtyp.Field(i)
		rvfield := rval.Field(i)

		if !rsfield.IsExported() {
			continue
		}

		columnName := rsfield.Tag.Get("db")
		if columnName == "" || columnName == "-" {
			continue
		}

		paramkey := fmt.Sprintf("_%s", columnName)

		columns = append(columns, "\""+columnName+"\"")
		params = append(params, ":"+paramkey)
		pp[paramkey] = rvfield.Interface()

	}

	sqlstr := fmt.Sprintf("INSERT"+" INTO \"%s\" (%s) VALUES (%s)", tableName, strings.Join(columns, ", "), strings.Join(params, ", "))

	sqlr, err := q.Exec(ctx, sqlstr, pp)
	if err != nil {
		return nil, err
	}

	return sqlr, nil
}

func QuerySingle[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) (TData, error) {
	rows, err := q.Query(ctx, sql, pp)
	if err != nil {
		return *new(TData), err
	}

	data, err := ScanSingle[TData](rows, mode, sec, true)
	if err != nil {
		return *new(TData), err
	}

	return data, nil
}

func QueryAll[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) ([]TData, error) {
	rows, err := q.Query(ctx, sql, pp)
	if err != nil {
		return nil, err
	}

	data, err := ScanAll[TData](rows, mode, sec, true)
	if err != nil {
		return nil, err
	}

	return data, nil
}

func ScanSingle[TData any](rows *sqlx.Rows, mode StructScanMode, sec StructScanSafety, close bool) (TData, error) {
	if rows.Next() {
		var strscan *StructScanner
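Finally, a sketch of how the new generic helpers fit together; the table name and model struct are hypothetical, and `db` stands for any sq.Queryable (such as the DB returned by sq.NewDB):

```go
package example

import (
	"context"

	"gogs.mikescher.com/BlackForestBytes/goext/sq"
)

// User is a hypothetical model; the db tags decide the column names used by InsertSingle
type User struct {
	ID   string `db:"id"`
	Name string `db:"name"`
}

func createAndList(ctx context.Context, db sq.Queryable) ([]User, error) {
	// builds: INSERT INTO "users" ("id", "name") VALUES (:_id, :_name)
	if _, err := sq.InsertSingle(ctx, db, "users", User{ID: "1", Name: "alice"}); err != nil {
		return nil, err
	}

	// scans every row into a User; Safe errors out if a selected column has no struct field
	return sq.QueryAll[User](ctx, db, "SELECT * FROM users", sq.PP{}, sq.SModeFast, sq.Safe)
}
```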