Compare commits
	
		
			10 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 645113d553 | |||
| 4a33986b6a | |||
| c1c8c64c76 | |||
| 0927fdc4d7 | |||
| 102a280dda | |||
| f13384d794 | |||
| 409d6e108d | |||
| ed53f297bd | |||
| 42424f4bc2 | |||
| 9e5b8c5277 | 
							
								
								
									
										2
									
								
								.idea/.gitignore
									
									
									
										generated
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.idea/.gitignore
									
									
									
										generated
									
									
										vendored
									
									
								
							| @@ -6,3 +6,5 @@ | ||||
| # Datasource local storage ignored files | ||||
| /dataSources/ | ||||
| /dataSources.local.xml | ||||
| # GitHub Copilot persisted chat sessions | ||||
| /copilot/chatSessions | ||||
|   | ||||
| @@ -30,7 +30,7 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"` | ||||
| | confext     | Mike       | Parses environment configuration into structs                                                                 | | ||||
| | cmdext      | Mike       | Runner for external commands/processes                                                                        | | ||||
| |             |            |                                                                                                               | | ||||
| | sq          | Mike       | Utility functions for sql based databases                                                                     | | ||||
| | sq          | Mike       | Utility functions for sql based databases (primarily sqlite)                                                  | | ||||
| | tst         | Mike       | Utility functions for unit tests                                                                              | | ||||
| |             |            |                                                                                                               | | ||||
| | rfctime     | Mike       | Classes for time serialization, with different marshalling methods for mongo and json                         | | ||||
|   | ||||
							
								
								
									
										6
									
								
								TODO.md
									
									
									
									
									
								
							
							
						
						
									
										6
									
								
								TODO.md
									
									
									
									
									
								
							| @@ -2,6 +2,8 @@ | ||||
|  | ||||
|  - cronext | ||||
|  | ||||
|  - rfctime.DateOnly | ||||
|  - rfctime.HMSTimeOnly | ||||
|  - rfctime.NanoTimeOnly | ||||
|  | ||||
|  - remove sqlx dependency from sq  (unmaintained, and mostly superseded by our own stuff?) | ||||
|  - Move DBLogger and DBPreprocessor to sq | ||||
| @@ -26,6 +26,10 @@ type CSIDDef struct { | ||||
| 	Prefix       string | ||||
| } | ||||
|  | ||||
| type CSIDGenOptions struct { | ||||
| 	DebugOutput *bool | ||||
| } | ||||
|  | ||||
| var rexCSIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||
|  | ||||
| var rexCSIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@csid:type)\s+\[(?P<prefix>[A-Z0-9]{3})].*$`)) | ||||
| @@ -35,7 +39,9 @@ var rexCSIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumCharsetIDGen | ||||
| //go:embed csid-generate.template | ||||
| var templateCSIDGenerateText string | ||||
|  | ||||
| func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | ||||
| func GenerateCharsetIDSpecs(sourceDir string, destFile string, opt CSIDGenOptions) error { | ||||
|  | ||||
| 	debugOutput := langext.Coalesce(opt.DebugOutput, false) | ||||
|  | ||||
| 	files, err := os.ReadDir(sourceDir) | ||||
| 	if err != nil { | ||||
| @@ -81,13 +87,18 @@ func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | ||||
| 	pkgname := "" | ||||
|  | ||||
| 	for _, f := range files { | ||||
| 		fmt.Printf("========= %s =========\n\n", f.Name()) | ||||
| 		fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name())) | ||||
| 		if debugOutput { | ||||
| 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||
| 		} | ||||
|  | ||||
| 		fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 		} | ||||
|  | ||||
| 		fmt.Printf("\n") | ||||
| 		if debugOutput { | ||||
| 			fmt.Printf("\n") | ||||
| 		} | ||||
|  | ||||
| 		allIDs = append(allIDs, fileIDs...) | ||||
|  | ||||
| @@ -113,7 +124,7 @@ func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) { | ||||
| func processCSIDFile(basedir string, fn string, debugOutput bool) ([]CSIDDef, string, error) { | ||||
| 	file, err := os.Open(fn) | ||||
| 	if err != nil { | ||||
| 		return nil, "", err | ||||
| @@ -155,7 +166,11 @@ func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) { | ||||
| 				Name:         match.GroupByName("name").Value(), | ||||
| 				Prefix:       match.GroupByName("prefix").Value(), | ||||
| 			} | ||||
| 			fmt.Printf("Found ID definition { '%s' }\n", def.Name) | ||||
|  | ||||
| 			if debugOutput { | ||||
| 				fmt.Printf("Found ID definition { '%s' }\n", def.Name) | ||||
| 			} | ||||
|  | ||||
| 			ids = append(ids, def) | ||||
| 		} | ||||
| 	} | ||||
|   | ||||
| @@ -34,10 +34,10 @@ func TestGenerateCSIDSpecs(t *testing.T) { | ||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go") | ||||
| 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go", CSIDGenOptions{DebugOutput: langext.PTrue}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go") | ||||
| 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go", CSIDGenOptions{DebugOutput: langext.PTrue}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	fmt.Println() | ||||
|   | ||||
| @@ -37,6 +37,11 @@ type EnumDef struct { | ||||
| 	Values       []EnumDefVal | ||||
| } | ||||
|  | ||||
| type EnumGenOptions struct { | ||||
| 	DebugOutput *bool | ||||
| 	GoFormat    *bool | ||||
| } | ||||
|  | ||||
| var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||
|  | ||||
| var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | ||||
| @@ -48,7 +53,7 @@ var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerato | ||||
| //go:embed enum-generate.template | ||||
| var templateEnumGenerateText string | ||||
|  | ||||
| func GenerateEnumSpecs(sourceDir string, destFile string) error { | ||||
| func GenerateEnumSpecs(sourceDir string, destFile string, opt EnumGenOptions) error { | ||||
|  | ||||
| 	oldChecksum := "N/A" | ||||
| 	if _, err := os.Stat(destFile); !os.IsNotExist(err) { | ||||
| @@ -61,7 +66,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	gocode, _, changed, err := _generateEnumSpecs(sourceDir, destFile, oldChecksum, true) | ||||
| 	gocode, _, changed, err := _generateEnumSpecs(sourceDir, destFile, oldChecksum, langext.Coalesce(opt.GoFormat, true), langext.Coalesce(opt.DebugOutput, false)) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| @@ -78,7 +83,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, gofmt bool) (string, string, bool, error) { | ||||
| func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, gofmt bool, debugOutput bool) (string, string, bool, error) { | ||||
|  | ||||
| 	files, err := os.ReadDir(sourceDir) | ||||
| 	if err != nil { | ||||
| @@ -113,13 +118,18 @@ func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, g | ||||
| 	pkgname := "" | ||||
|  | ||||
| 	for _, f := range files { | ||||
| 		fmt.Printf("========= %s =========\n\n", f.Name()) | ||||
| 		fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name())) | ||||
| 		if debugOutput { | ||||
| 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||
| 		} | ||||
|  | ||||
| 		fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||
| 		if err != nil { | ||||
| 			return "", "", false, err | ||||
| 		} | ||||
|  | ||||
| 		fmt.Printf("\n") | ||||
| 		if debugOutput { | ||||
| 			fmt.Printf("\n") | ||||
| 		} | ||||
|  | ||||
| 		allEnums = append(allEnums, fileEnums...) | ||||
|  | ||||
| @@ -146,7 +156,7 @@ func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, g | ||||
| 	return string(fdata), newChecksum, true, nil | ||||
| } | ||||
|  | ||||
| func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | ||||
| func processEnumFile(basedir string, fn string, debugOutput bool) ([]EnumDef, string, error) { | ||||
| 	file, err := os.Open(fn) | ||||
| 	if err != nil { | ||||
| 		return nil, "", err | ||||
| @@ -190,7 +200,10 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | ||||
| 				Values:       make([]EnumDefVal, 0), | ||||
| 			} | ||||
| 			enums = append(enums, def) | ||||
| 			fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type) | ||||
|  | ||||
| 			if debugOutput { | ||||
| 				fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type) | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		if match, ok := rexEnumValueDef.MatchFirst(line); ok { | ||||
| @@ -230,16 +243,21 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | ||||
| 				if v.EnumTypeName == typename { | ||||
| 					enums[i].Values = append(enums[i].Values, def) | ||||
| 					found = true | ||||
| 					if def.Description != nil { | ||||
| 						fmt.Printf("Found enum value [%s] for '%s'  ('%s')\n", def.Value, def.VarName, *def.Description) | ||||
| 					} else { | ||||
| 						fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName) | ||||
|  | ||||
| 					if debugOutput { | ||||
| 						if def.Description != nil { | ||||
| 							fmt.Printf("Found enum value [%s] for '%s'  ('%s')\n", def.Value, def.VarName, *def.Description) | ||||
| 						} else { | ||||
| 							fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName) | ||||
| 						} | ||||
| 					} | ||||
| 					break | ||||
| 				} | ||||
| 			} | ||||
| 			if !found { | ||||
| 				fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName) | ||||
| 				if debugOutput { | ||||
| 					fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName) | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|   | ||||
| @@ -7,6 +7,8 @@ import "gogs.mikescher.com/BlackForestBytes/goext/enums" | ||||
|  | ||||
| const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | ||||
|  | ||||
| {{ $pkgname  := .PkgName }} | ||||
|  | ||||
| {{range .Enums}} | ||||
|  | ||||
| {{ $hasStr   := ( . | hasStr   ) }} | ||||
| @@ -97,6 +99,14 @@ func (e {{.EnumTypeName}}) VarName() string { | ||||
| 	return "" | ||||
| } | ||||
|  | ||||
| func (e {{.EnumTypeName}}) TypeName() string { | ||||
| 	return "{{$typename}}" | ||||
| } | ||||
|  | ||||
| func (e {{.EnumTypeName}}) PackageName() string { | ||||
| 	return "{{$pkgname }}" | ||||
| } | ||||
|  | ||||
| func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue { | ||||
|     {{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}} | ||||
| } | ||||
| @@ -134,4 +144,12 @@ func {{.EnumTypeName}}ValuesDescriptionMeta() []enums.EnumDescriptionMetaValue { | ||||
| } | ||||
| {{end}} | ||||
|  | ||||
| {{end}} | ||||
| {{end}} | ||||
|  | ||||
| // ================================ ================= ================================ | ||||
|  | ||||
| func AllPackageEnums() []enums.Enum { | ||||
|     return []enums.Enum{ {{range .Enums}} | ||||
|         {{ if gt (len .Values) 0 }} {{  $v := index .Values 0 }} {{ $v.VarName}}, {{end}} // {{ .EnumTypeName }} {{end}} | ||||
|     } | ||||
| } | ||||
| @@ -37,10 +37,10 @@ func TestGenerateEnumSpecs(t *testing.T) { | ||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	s1, cs1, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true) | ||||
| 	s1, cs1, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true, true) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	s2, cs2, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true) | ||||
| 	s2, cs2, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true, true) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	tst.AssertEqual(t, cs1, cs2) | ||||
| @@ -76,7 +76,7 @@ func TestGenerateEnumSpecsData(t *testing.T) { | ||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	s1, _, _, err := _generateEnumSpecs(tmpDir, "", "", true) | ||||
| 	s1, _, _, err := _generateEnumSpecs(tmpDir, "", "", true, true) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	fmt.Println() | ||||
|   | ||||
| @@ -25,6 +25,10 @@ type IDDef struct { | ||||
| 	Name         string | ||||
| } | ||||
|  | ||||
| type IDGenOptions struct { | ||||
| 	DebugOutput *bool | ||||
| } | ||||
|  | ||||
| var rexIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||
|  | ||||
| var rexIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@id:type).*$`)) | ||||
| @@ -34,7 +38,9 @@ var rexIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumIDGenerator = | ||||
| //go:embed id-generate.template | ||||
| var templateIDGenerateText string | ||||
|  | ||||
| func GenerateIDSpecs(sourceDir string, destFile string) error { | ||||
| func GenerateIDSpecs(sourceDir string, destFile string, opt *IDGenOptions) error { | ||||
|  | ||||
| 	debugOutput := langext.Coalesce(opt.DebugOutput, false) | ||||
|  | ||||
| 	files, err := os.ReadDir(sourceDir) | ||||
| 	if err != nil { | ||||
| @@ -80,13 +86,18 @@ func GenerateIDSpecs(sourceDir string, destFile string) error { | ||||
| 	pkgname := "" | ||||
|  | ||||
| 	for _, f := range files { | ||||
| 		fmt.Printf("========= %s =========\n\n", f.Name()) | ||||
| 		fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name())) | ||||
| 		if debugOutput { | ||||
| 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||
| 		} | ||||
|  | ||||
| 		fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 		} | ||||
|  | ||||
| 		fmt.Printf("\n") | ||||
| 		if debugOutput { | ||||
| 			fmt.Printf("\n") | ||||
| 		} | ||||
|  | ||||
| 		allIDs = append(allIDs, fileIDs...) | ||||
|  | ||||
| @@ -112,7 +123,7 @@ func GenerateIDSpecs(sourceDir string, destFile string) error { | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func processIDFile(basedir string, fn string) ([]IDDef, string, error) { | ||||
| func processIDFile(basedir string, fn string, debugOutput bool) ([]IDDef, string, error) { | ||||
| 	file, err := os.Open(fn) | ||||
| 	if err != nil { | ||||
| 		return nil, "", err | ||||
| @@ -153,7 +164,11 @@ func processIDFile(basedir string, fn string) ([]IDDef, string, error) { | ||||
| 				FileRelative: rfp, | ||||
| 				Name:         match.GroupByName("name").Value(), | ||||
| 			} | ||||
| 			fmt.Printf("Found ID definition { '%s' }\n", def.Name) | ||||
|  | ||||
| 			if debugOutput { | ||||
| 				fmt.Printf("Found ID definition { '%s' }\n", def.Name) | ||||
| 			} | ||||
|  | ||||
| 			ids = append(ids, def) | ||||
| 		} | ||||
| 	} | ||||
|   | ||||
| @@ -34,10 +34,10 @@ func TestGenerateIDSpecs(t *testing.T) { | ||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go") | ||||
| 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go", &IDGenOptions{DebugOutput: langext.PTrue}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go") | ||||
| 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go", &IDGenOptions{DebugOutput: langext.PTrue}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	fmt.Println() | ||||
|   | ||||
| @@ -5,6 +5,8 @@ type Enum interface { | ||||
| 	ValuesAny() []any | ||||
| 	ValuesMeta() []EnumMetaValue | ||||
| 	VarName() string | ||||
| 	TypeName() string | ||||
| 	PackageName() string | ||||
| } | ||||
|  | ||||
| type StringEnum interface { | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| package goext | ||||
|  | ||||
| const GoextVersion = "0.0.398" | ||||
| const GoextVersion = "0.0.408" | ||||
|  | ||||
| const GoextVersionTimestamp = "2024-03-09T13:36:06+0100" | ||||
| const GoextVersionTimestamp = "2024-03-11T16:41:47+0100" | ||||
|   | ||||
| @@ -9,6 +9,8 @@ import ( | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||
| 	"reflect" | ||||
| 	"strconv" | ||||
| 	"strings" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| @@ -65,36 +67,20 @@ func (t *Date) UnmarshalJSON(data []byte) error { | ||||
| 	if err := json.Unmarshal(data, &str); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	t0, err := time.Parse(t.FormatStr(), str) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	t.Year = t0.Year() | ||||
| 	t.Month = int(t0.Month()) | ||||
| 	t.Day = t0.Day() | ||||
| 	return nil | ||||
| 	return t.ParseString(str) | ||||
| } | ||||
|  | ||||
| func (t Date) MarshalJSON() ([]byte, error) { | ||||
| 	str := t.TimeUTC().Format(t.FormatStr()) | ||||
| 	str := t.String() | ||||
| 	return json.Marshal(str) | ||||
| } | ||||
|  | ||||
| func (t Date) MarshalText() ([]byte, error) { | ||||
| 	b := make([]byte, 0, len(t.FormatStr())) | ||||
| 	return t.TimeUTC().AppendFormat(b, t.FormatStr()), nil | ||||
| 	return []byte(t.String()), nil | ||||
| } | ||||
|  | ||||
| func (t *Date) UnmarshalText(data []byte) error { | ||||
| 	var err error | ||||
| 	v, err := time.Parse(t.FormatStr(), string(data)) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	t.Year = v.Year() | ||||
| 	t.Month = int(v.Month()) | ||||
| 	t.Day = v.Day() | ||||
| 	return nil | ||||
| 	return t.ParseString(string(data)) | ||||
| } | ||||
|  | ||||
| func (t *Date) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||
| @@ -164,7 +150,7 @@ func (t Date) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val | ||||
| } | ||||
|  | ||||
| func (t Date) Serialize() string { | ||||
| 	return t.TimeUTC().Format(t.FormatStr()) | ||||
| 	return t.String() | ||||
| } | ||||
|  | ||||
| func (t Date) FormatStr() string { | ||||
| @@ -212,11 +198,48 @@ func (t Date) Format(layout string) string { | ||||
| } | ||||
|  | ||||
| func (t Date) GoString() string { | ||||
| 	return t.TimeUTC().GoString() | ||||
| 	return fmt.Sprintf("rfctime.Date{Year: %d, Month: %d, Day: %d}", t.Year, t.Month, t.Day) | ||||
| } | ||||
|  | ||||
| func (t Date) String() string { | ||||
| 	return t.TimeUTC().String() | ||||
| 	return fmt.Sprintf("%04d-%02d-%02d", t.Year, t.Month, t.Day) | ||||
| } | ||||
|  | ||||
| func (t *Date) ParseString(v string) error { | ||||
| 	split := strings.Split(v, "-") | ||||
| 	if len(split) != 3 { | ||||
| 		return errors.New("invalid date format: " + v) | ||||
| 	} | ||||
| 	year, err := strconv.ParseInt(split[0], 10, 32) | ||||
| 	if err != nil { | ||||
| 		return errors.New("invalid date format: " + v + ": " + err.Error()) | ||||
| 	} | ||||
| 	month, err := strconv.ParseInt(split[1], 10, 32) | ||||
| 	if err != nil { | ||||
| 		return errors.New("invalid date format: " + v + ": " + err.Error()) | ||||
| 	} | ||||
| 	day, err := strconv.ParseInt(split[2], 10, 32) | ||||
| 	if err != nil { | ||||
| 		return errors.New("invalid date format: " + v + ": " + err.Error()) | ||||
| 	} | ||||
|  | ||||
| 	if year < 0 { | ||||
| 		return errors.New("invalid date format: " + v + ": year is negative") | ||||
| 	} | ||||
|  | ||||
| 	if month < 1 || month > 12 { | ||||
| 		return errors.New("invalid date format: " + v + ": month is out of range") | ||||
| 	} | ||||
|  | ||||
| 	if day < 1 || day > 31 { | ||||
| 		return errors.New("invalid date format: " + v + ": day is out of range") | ||||
| 	} | ||||
|  | ||||
| 	t.Year = int(year) | ||||
| 	t.Month = int(month) | ||||
| 	t.Day = int(day) | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func NewDate(t time.Time) Date { | ||||
|   | ||||
| @@ -52,8 +52,7 @@ func TestCreateUpdateStatement(t *testing.T) { | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db.RegisterDefaultConverter() | ||||
| 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||
|  | ||||
| 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|   | ||||
							
								
								
									
										32
									
								
								sq/commentTrimmer.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										32
									
								
								sq/commentTrimmer.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,32 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| var CommentTrimmer = NewPreListener(fnTrimComments) | ||||
|  | ||||
| func fnTrimComments(ctx context.Context, cmdtype string, id *uint16, sql *string, params *PP) error { | ||||
|  | ||||
| 	res := make([]string, 0) | ||||
|  | ||||
| 	for _, s := range strings.Split(*sql, "\n") { | ||||
| 		if strings.HasPrefix(strings.TrimSpace(s), "--") { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		idx := strings.Index(s, "--") | ||||
| 		if idx != -1 { | ||||
| 			s = s[:idx] | ||||
| 		} | ||||
|  | ||||
| 		s = strings.TrimRight(s, " \t\r\n") | ||||
|  | ||||
| 		res = append(res, s) | ||||
| 	} | ||||
|  | ||||
| 	*sql = strings.Join(res, "\n") | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
							
								
								
									
										187
									
								
								sq/converter.go
									
									
									
									
									
								
							
							
						
						
									
										187
									
								
								sq/converter.go
									
									
									
									
									
								
							| @@ -1,17 +1,10 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"encoding/json" | ||||
| 	"errors" | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/timeext" | ||||
| 	"reflect" | ||||
| 	"strconv" | ||||
| 	"strings" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| type DBTypeConverter interface { | ||||
| @@ -21,169 +14,16 @@ type DBTypeConverter interface { | ||||
| 	DBToModel(v any) (any, error) | ||||
| } | ||||
|  | ||||
| var ConverterBoolToBit = NewDBTypeConverter[bool, int64](func(v bool) (int64, error) { | ||||
| 	return langext.Conditional(v, int64(1), int64(0)), nil | ||||
| }, func(v int64) (bool, error) { | ||||
| 	if v == 0 { | ||||
| 		return false, nil | ||||
| 	} | ||||
| 	if v == 1 { | ||||
| 		return true, nil | ||||
| 	} | ||||
| 	return false, errors.New(fmt.Sprintf("invalid valud for boolean: '%d'", v)) | ||||
| }) | ||||
| type DBDataConstraint interface { | ||||
| 	string | langext.NumberConstraint | []byte | ||||
| } | ||||
|  | ||||
| var ConverterTimeToUnixMillis = NewDBTypeConverter[time.Time, int64](func(v time.Time) (int64, error) { | ||||
| 	return v.UnixMilli(), nil | ||||
| }, func(v int64) (time.Time, error) { | ||||
| 	return time.UnixMilli(v), nil | ||||
| }) | ||||
| type DatabaseConvertible[TModelData any, TDBData DBDataConstraint] interface { | ||||
| 	MarshalToDB(v TModelData) (TDBData, error) | ||||
| 	UnmarshalToModel(v TDBData) (TModelData, error) | ||||
| } | ||||
|  | ||||
| var ConverterRFCUnixMilliTimeToUnixMillis = NewDBTypeConverter[rfctime.UnixMilliTime, int64](func(v rfctime.UnixMilliTime) (int64, error) { | ||||
| 	return v.UnixMilli(), nil | ||||
| }, func(v int64) (rfctime.UnixMilliTime, error) { | ||||
| 	return rfctime.NewUnixMilli(time.UnixMilli(v)), nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCUnixNanoTimeToUnixNanos = NewDBTypeConverter[rfctime.UnixNanoTime, int64](func(v rfctime.UnixNanoTime) (int64, error) { | ||||
| 	return v.UnixNano(), nil | ||||
| }, func(v int64) (rfctime.UnixNanoTime, error) { | ||||
| 	return rfctime.NewUnixNano(time.Unix(0, v)), nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCUnixTimeToUnixSeconds = NewDBTypeConverter[rfctime.UnixTime, int64](func(v rfctime.UnixTime) (int64, error) { | ||||
| 	return v.Unix(), nil | ||||
| }, func(v int64) (rfctime.UnixTime, error) { | ||||
| 	return rfctime.NewUnix(time.Unix(v, 0)), nil | ||||
| }) | ||||
|  | ||||
| // ConverterRFC339TimeToString | ||||
| // Does not really use RFC339 - but sqlite does not understand timezones and the `T` delimiter | ||||
| var ConverterRFC339TimeToString = NewDBTypeConverter[rfctime.RFC3339Time, string](func(v rfctime.RFC3339Time) (string, error) { | ||||
| 	return v.Time().In(time.UTC).Format("2006-01-02 15:04:05"), nil | ||||
| }, func(v string) (rfctime.RFC3339Time, error) { | ||||
| 	t, err := time.Parse("2006-01-02 15:04:05", v) | ||||
| 	if err != nil { | ||||
| 		return rfctime.RFC3339Time{}, err | ||||
| 	} | ||||
| 	return rfctime.NewRFC3339(t), nil | ||||
| }) | ||||
|  | ||||
| // ConverterRFC339NanoTimeToString | ||||
| // Does not really use RFC339 - but sqlite does not understand timezones and the `T` delimiter | ||||
| var ConverterRFC339NanoTimeToString = NewDBTypeConverter[rfctime.RFC3339NanoTime, string](func(v rfctime.RFC3339NanoTime) (string, error) { | ||||
| 	return v.Time().In(time.UTC).Format("2006-01-02 15:04:05.999999999"), nil | ||||
| }, func(v string) (rfctime.RFC3339NanoTime, error) { | ||||
| 	t, err := time.ParseInLocation("2006-01-02 15:04:05.999999999", v, time.UTC) | ||||
| 	if err != nil { | ||||
| 		return rfctime.RFC3339NanoTime{}, err | ||||
| 	} | ||||
| 	return rfctime.NewRFC3339Nano(t), nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCDateToString = NewDBTypeConverter[rfctime.Date, string](func(v rfctime.Date) (string, error) { | ||||
| 	return fmt.Sprintf("%04d-%02d-%02d", v.Year, v.Month, v.Day), nil | ||||
| }, func(v string) (rfctime.Date, error) { | ||||
| 	split := strings.Split(v, "-") | ||||
| 	if len(split) != 3 { | ||||
| 		return rfctime.Date{}, errors.New("invalid date format: " + v) | ||||
| 	} | ||||
| 	year, err := strconv.ParseInt(split[0], 10, 32) | ||||
| 	if err != nil { | ||||
| 		return rfctime.Date{}, errors.New("invalid date format: " + v + ": " + err.Error()) | ||||
| 	} | ||||
| 	month, err := strconv.ParseInt(split[0], 10, 32) | ||||
| 	if err != nil { | ||||
| 		return rfctime.Date{}, errors.New("invalid date format: " + v + ": " + err.Error()) | ||||
| 	} | ||||
| 	day, err := strconv.ParseInt(split[0], 10, 32) | ||||
| 	if err != nil { | ||||
| 		return rfctime.Date{}, errors.New("invalid date format: " + v + ": " + err.Error()) | ||||
| 	} | ||||
|  | ||||
| 	return rfctime.Date{Year: int(year), Month: int(month), Day: int(day)}, nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCTimeToString = NewDBTypeConverter[rfctime.Time, string](func(v rfctime.Time) (string, error) { | ||||
| 	return v.SerializeShort(), nil | ||||
| }, func(v string) (rfctime.Time, error) { | ||||
| 	res := rfctime.Time{} | ||||
| 	err := res.Deserialize(v) | ||||
| 	if err != nil { | ||||
| 		return rfctime.Time{}, err | ||||
| 	} | ||||
| 	return res, nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCSecondsF64ToString = NewDBTypeConverter[rfctime.SecondsF64, float64](func(v rfctime.SecondsF64) (float64, error) { | ||||
| 	return v.Seconds(), nil | ||||
| }, func(v float64) (rfctime.SecondsF64, error) { | ||||
| 	return rfctime.NewSecondsF64(timeext.FromSeconds(v)), nil | ||||
| }) | ||||
|  | ||||
| var ConverterJsonObjToString = NewDBTypeConverter[JsonObj, string](func(v JsonObj) (string, error) { | ||||
| 	mrsh, err := json.Marshal(v) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
| 	return string(mrsh), nil | ||||
| }, func(v string) (JsonObj, error) { | ||||
| 	var mrsh JsonObj | ||||
| 	if err := json.Unmarshal([]byte(v), &mrsh); err != nil { | ||||
| 		return JsonObj{}, err | ||||
| 	} | ||||
| 	return mrsh, nil | ||||
| }) | ||||
|  | ||||
| var ConverterJsonArrToString = NewDBTypeConverter[JsonArr, string](func(v JsonArr) (string, error) { | ||||
| 	mrsh, err := json.Marshal(v) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
| 	return string(mrsh), nil | ||||
| }, func(v string) (JsonArr, error) { | ||||
| 	var mrsh JsonArr | ||||
| 	if err := json.Unmarshal([]byte(v), &mrsh); err != nil { | ||||
| 		return JsonArr{}, err | ||||
| 	} | ||||
| 	return mrsh, nil | ||||
| }) | ||||
|  | ||||
| var ConverterExErrCategoryToString = NewDBTypeConverter[exerr.ErrorCategory, string](func(v exerr.ErrorCategory) (string, error) { | ||||
| 	return v.Category, nil | ||||
| }, func(v string) (exerr.ErrorCategory, error) { | ||||
| 	for _, cat := range exerr.AllCategories { | ||||
| 		if cat.Category == v { | ||||
| 			return cat, nil | ||||
| 		} | ||||
| 	} | ||||
| 	return exerr.CatUser, errors.New("failed to convert '" + v + "' to exerr.ErrorCategory") | ||||
| }) | ||||
|  | ||||
| var ConverterExErrSeverityToString = NewDBTypeConverter[exerr.ErrorSeverity, string](func(v exerr.ErrorSeverity) (string, error) { | ||||
| 	return v.Severity, nil | ||||
| }, func(v string) (exerr.ErrorSeverity, error) { | ||||
| 	for _, sev := range exerr.AllSeverities { | ||||
| 		if sev.Severity == v { | ||||
| 			return sev, nil | ||||
| 		} | ||||
| 	} | ||||
| 	return exerr.SevErr, errors.New("failed to convert '" + v + "' to exerr.ErrorSeverity") | ||||
| }) | ||||
|  | ||||
| var ConverterExErrTypeToString = NewDBTypeConverter[exerr.ErrorType, string](func(v exerr.ErrorType) (string, error) { | ||||
| 	return v.Key, nil | ||||
| }, func(v string) (exerr.ErrorType, error) { | ||||
| 	for _, etp := range exerr.ListRegisteredTypes() { | ||||
| 		if etp.Key == v { | ||||
| 			return etp, nil | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return exerr.NewType(v, nil), nil | ||||
| }) | ||||
|  | ||||
| type dbTypeConverterImpl[TModelData any, TDBData any] struct { | ||||
| type dbTypeConverterImpl[TModelData any, TDBData DBDataConstraint] struct { | ||||
| 	dbTypeString    string | ||||
| 	modelTypeString string | ||||
| 	todb            func(v TModelData) (TDBData, error) | ||||
| @@ -212,7 +52,7 @@ func (t *dbTypeConverterImpl[TModelData, TDBData]) DBToModel(v any) (any, error) | ||||
| 	return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.dbTypeString, v)) | ||||
| } | ||||
|  | ||||
| func NewDBTypeConverter[TModelData any, TDBData any](todb func(v TModelData) (TDBData, error), tomodel func(v TDBData) (TModelData, error)) DBTypeConverter { | ||||
| func NewDBTypeConverter[TModelData any, TDBData DBDataConstraint](todb func(v TModelData) (TDBData, error), tomodel func(v TDBData) (TModelData, error)) DBTypeConverter { | ||||
| 	return &dbTypeConverterImpl[TModelData, TDBData]{ | ||||
| 		dbTypeString:    fmt.Sprintf("%T", *new(TDBData)), | ||||
| 		modelTypeString: fmt.Sprintf("%T", *new(TModelData)), | ||||
| @@ -221,6 +61,15 @@ func NewDBTypeConverter[TModelData any, TDBData any](todb func(v TModelData) (TD | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func NewAutoDBTypeConverter[TDBData DBDataConstraint, TModelData DatabaseConvertible[TModelData, TDBData]](obj TModelData) DBTypeConverter { | ||||
| 	return &dbTypeConverterImpl[TModelData, TDBData]{ | ||||
| 		dbTypeString:    fmt.Sprintf("%T", *new(TDBData)), | ||||
| 		modelTypeString: fmt.Sprintf("%T", *new(TModelData)), | ||||
| 		todb:            obj.MarshalToDB, | ||||
| 		tomodel:         obj.UnmarshalToModel, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func convertValueToDB(q Queryable, value any) (any, error) { | ||||
| 	modelTypeStr := fmt.Sprintf("%T", value) | ||||
|  | ||||
|   | ||||
							
								
								
									
										147
									
								
								sq/converterDefault.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										147
									
								
								sq/converterDefault.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,147 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"errors" | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/timeext" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| // ========================== COMMON DATATYPES ========================== | ||||
|  | ||||
| var ConverterBoolToBit = NewDBTypeConverter[bool, int64](func(v bool) (int64, error) { | ||||
| 	return langext.Conditional(v, int64(1), int64(0)), nil | ||||
| }, func(v int64) (bool, error) { | ||||
| 	if v == 0 { | ||||
| 		return false, nil | ||||
| 	} | ||||
| 	if v == 1 { | ||||
| 		return true, nil | ||||
| 	} | ||||
| 	return false, errors.New(fmt.Sprintf("invalid valud for boolean: '%d'", v)) | ||||
| }) | ||||
|  | ||||
| var ConverterTimeToUnixMillis = NewDBTypeConverter[time.Time, int64](func(v time.Time) (int64, error) { | ||||
| 	return v.UnixMilli(), nil | ||||
| }, func(v int64) (time.Time, error) { | ||||
| 	return time.UnixMilli(v), nil | ||||
| }) | ||||
|  | ||||
| // ========================== RFCTIME ========================== | ||||
|  | ||||
| var ConverterRFCUnixMilliTimeToUnixMillis = NewDBTypeConverter[rfctime.UnixMilliTime, int64](func(v rfctime.UnixMilliTime) (int64, error) { | ||||
| 	return v.UnixMilli(), nil | ||||
| }, func(v int64) (rfctime.UnixMilliTime, error) { | ||||
| 	return rfctime.NewUnixMilli(time.UnixMilli(v)), nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCUnixNanoTimeToUnixNanos = NewDBTypeConverter[rfctime.UnixNanoTime, int64](func(v rfctime.UnixNanoTime) (int64, error) { | ||||
| 	return v.UnixNano(), nil | ||||
| }, func(v int64) (rfctime.UnixNanoTime, error) { | ||||
| 	return rfctime.NewUnixNano(time.Unix(0, v)), nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCUnixTimeToUnixSeconds = NewDBTypeConverter[rfctime.UnixTime, int64](func(v rfctime.UnixTime) (int64, error) { | ||||
| 	return v.Unix(), nil | ||||
| }, func(v int64) (rfctime.UnixTime, error) { | ||||
| 	return rfctime.NewUnix(time.Unix(v, 0)), nil | ||||
| }) | ||||
|  | ||||
| // ConverterRFC339TimeToString | ||||
| // Does not really use RFC339 - but sqlite does not understand timezones and the `T` delimiter | ||||
| var ConverterRFC339TimeToString = NewDBTypeConverter[rfctime.RFC3339Time, string](func(v rfctime.RFC3339Time) (string, error) { | ||||
| 	return v.Time().In(time.UTC).Format("2006-01-02 15:04:05"), nil | ||||
| }, func(v string) (rfctime.RFC3339Time, error) { | ||||
| 	t, err := time.Parse("2006-01-02 15:04:05", v) | ||||
| 	if err != nil { | ||||
| 		return rfctime.RFC3339Time{}, err | ||||
| 	} | ||||
| 	return rfctime.NewRFC3339(t), nil | ||||
| }) | ||||
|  | ||||
// ConverterRFC339NanoTimeToString stores an rfctime.RFC3339NanoTime as a
// "YYYY-MM-DD hh:mm:ss.fffffffff" string in UTC (trailing zero fractions trimmed by Format).
// Does not really use RFC 3339 - but sqlite does not understand timezones and the `T` delimiter.
var ConverterRFC339NanoTimeToString = NewDBTypeConverter[rfctime.RFC3339NanoTime, string](func(v rfctime.RFC3339NanoTime) (string, error) {
	return v.Time().In(time.UTC).Format("2006-01-02 15:04:05.999999999"), nil
}, func(v string) (rfctime.RFC3339NanoTime, error) {
	t, err := time.ParseInLocation("2006-01-02 15:04:05.999999999", v, time.UTC)
	if err != nil {
		return rfctime.RFC3339NanoTime{}, err
	}
	return rfctime.NewRFC3339Nano(t), nil
})
|  | ||||
| var ConverterRFCDateToString = NewDBTypeConverter[rfctime.Date, string](func(v rfctime.Date) (string, error) { | ||||
| 	return fmt.Sprintf("%04d-%02d-%02d", v.Year, v.Month, v.Day), nil | ||||
| }, func(v string) (rfctime.Date, error) { | ||||
| 	d := rfctime.Date{} | ||||
| 	if err := d.ParseString(v); err != nil { | ||||
| 		return rfctime.Date{}, err | ||||
| 	} else { | ||||
| 		return d, nil | ||||
| 	} | ||||
| }) | ||||
|  | ||||
| var ConverterRFCTimeToString = NewDBTypeConverter[rfctime.Time, string](func(v rfctime.Time) (string, error) { | ||||
| 	return v.SerializeShort(), nil | ||||
| }, func(v string) (rfctime.Time, error) { | ||||
| 	res := rfctime.Time{} | ||||
| 	err := res.Deserialize(v) | ||||
| 	if err != nil { | ||||
| 		return rfctime.Time{}, err | ||||
| 	} | ||||
| 	return res, nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCSecondsF64ToString = NewDBTypeConverter[rfctime.SecondsF64, float64](func(v rfctime.SecondsF64) (float64, error) { | ||||
| 	return v.Seconds(), nil | ||||
| }, func(v float64) (rfctime.SecondsF64, error) { | ||||
| 	return rfctime.NewSecondsF64(timeext.FromSeconds(v)), nil | ||||
| }) | ||||
|  | ||||
| // ========================== JSON ========================== | ||||
|  | ||||
// ConverterJsonObjToString stores a JsonObj as its JSON string (wired through JsonObj's MarshalToDB/UnmarshalToModel).
var ConverterJsonObjToString = NewAutoDBTypeConverter(JsonObj{})

// ConverterJsonArrToString stores a JsonArr as its JSON string (wired through JsonArr's MarshalToDB/UnmarshalToModel).
var ConverterJsonArrToString = NewAutoDBTypeConverter(JsonArr{})
|  | ||||
| // Json[T] must be registered manually for each gen-type | ||||
|  | ||||
| // ========================== EXERR ========================== | ||||
|  | ||||
| var ConverterExErrCategoryToString = NewDBTypeConverter[exerr.ErrorCategory, string](func(v exerr.ErrorCategory) (string, error) { | ||||
| 	return v.Category, nil | ||||
| }, func(v string) (exerr.ErrorCategory, error) { | ||||
| 	for _, cat := range exerr.AllCategories { | ||||
| 		if cat.Category == v { | ||||
| 			return cat, nil | ||||
| 		} | ||||
| 	} | ||||
| 	return exerr.CatUser, errors.New("failed to convert '" + v + "' to exerr.ErrorCategory") | ||||
| }) | ||||
|  | ||||
| var ConverterExErrSeverityToString = NewDBTypeConverter[exerr.ErrorSeverity, string](func(v exerr.ErrorSeverity) (string, error) { | ||||
| 	return v.Severity, nil | ||||
| }, func(v string) (exerr.ErrorSeverity, error) { | ||||
| 	for _, sev := range exerr.AllSeverities { | ||||
| 		if sev.Severity == v { | ||||
| 			return sev, nil | ||||
| 		} | ||||
| 	} | ||||
| 	return exerr.SevErr, errors.New("failed to convert '" + v + "' to exerr.ErrorSeverity") | ||||
| }) | ||||
|  | ||||
// ConverterExErrTypeToString stores an exerr.ErrorType by its Key string.
var ConverterExErrTypeToString = NewDBTypeConverter[exerr.ErrorType, string](func(v exerr.ErrorType) (string, error) {
	return v.Key, nil
}, func(v string) (exerr.ErrorType, error) {
	for _, etp := range exerr.ListRegisteredTypes() {
		if etp.Key == v {
			return etp, nil
		}
	}

	// Unknown keys are not an error: a fresh ErrorType is created on the fly.
	// NOTE(review): exerr.NewType presumably registers the type globally - confirm that
	// side effect is intended when merely unmarshalling a DB value.
	return exerr.NewType(v, nil), nil
})
| @@ -17,7 +17,11 @@ type DB interface { | ||||
| 	AddListener(listener Listener) | ||||
| 	Exit() error | ||||
| 	RegisterConverter(DBTypeConverter) | ||||
| 	RegisterDefaultConverter() | ||||
| } | ||||
|  | ||||
// DBOptions configures NewDB. All fields are optional pointers;
// a nil field means "use the default" (both default to true in NewDB).
type DBOptions struct {
	RegisterDefaultConverter *bool // register the built-in default type converters (default: true)
	RegisterCommentTrimmer   *bool // register the CommentTrimmer listener (default: true)
}
|  | ||||
| type database struct { | ||||
| @@ -28,13 +32,23 @@ type database struct { | ||||
| 	conv  []DBTypeConverter | ||||
| } | ||||
|  | ||||
| func NewDB(db *sqlx.DB) DB { | ||||
| 	return &database{ | ||||
| func NewDB(db *sqlx.DB, opt DBOptions) DB { | ||||
| 	sqdb := &database{ | ||||
| 		db:    db, | ||||
| 		txctr: 0, | ||||
| 		lock:  sync.Mutex{}, | ||||
| 		lstr:  make([]Listener, 0), | ||||
| 	} | ||||
|  | ||||
| 	if langext.Coalesce(opt.RegisterDefaultConverter, true) { | ||||
| 		sqdb.registerDefaultConverter() | ||||
| 	} | ||||
|  | ||||
| 	if langext.Coalesce(opt.RegisterCommentTrimmer, true) { | ||||
| 		sqdb.AddListener(CommentTrimmer) | ||||
| 	} | ||||
|  | ||||
| 	return sqdb | ||||
| } | ||||
|  | ||||
| func (db *database) AddListener(listener Listener) { | ||||
| @@ -141,7 +155,7 @@ func (db *database) RegisterConverter(conv DBTypeConverter) { | ||||
| 	db.conv = append(db.conv, conv) | ||||
| } | ||||
|  | ||||
| func (db *database) RegisterDefaultConverter() { | ||||
| func (db *database) registerDefaultConverter() { | ||||
| 	db.RegisterConverter(ConverterBoolToBit) | ||||
|  | ||||
| 	db.RegisterConverter(ConverterTimeToUnixMillis) | ||||
|   | ||||
| @@ -47,3 +47,10 @@ func NewSimplePaginateFilter(filterClause string, filterParams PP, sort []Filter | ||||
| 		}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
// NewEmptyPaginateFilter returns a filter that matches every row
// ("1=1" condition, no joins) and applies no sort order.
func NewEmptyPaginateFilter() PaginateFilter {
	return genericPaginateFilter{
		sql:  func(params PP) (string, string, []string) { return "1=1", "", nil },
		sort: func() []FilterSort { return make([]FilterSort, 0) },
	}
}
|   | ||||
| @@ -31,7 +31,7 @@ func HashMattnSqliteSchema(ctx context.Context, schemaStr string) (string, error | ||||
| 		return "", err | ||||
| 	} | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db := NewDB(xdb, DBOptions{}) | ||||
|  | ||||
| 	_, err = db.Exec(ctx, schemaStr, PP{}) | ||||
| 	if err != nil { | ||||
| @@ -59,7 +59,7 @@ func HashGoSqliteSchema(ctx context.Context, schemaStr string) (string, error) { | ||||
| 		return "", err | ||||
| 	} | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db := NewDB(xdb, DBOptions{}) | ||||
|  | ||||
| 	_, err = db.Exec(ctx, schemaStr, PP{}) | ||||
| 	if err != nil { | ||||
|   | ||||
							
								
								
									
										54
									
								
								sq/json.go
									
									
									
									
									
								
							
							
						
						
									
										54
									
								
								sq/json.go
									
									
									
									
									
								
							| @@ -1,5 +1,59 @@ | ||||
| package sq | ||||
|  | ||||
| import "encoding/json" | ||||
|  | ||||
| type JsonObj map[string]any | ||||
|  | ||||
| func (j JsonObj) MarshalToDB(v JsonObj) (string, error) { | ||||
| 	mrsh, err := json.Marshal(v) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
| 	return string(mrsh), nil | ||||
| } | ||||
|  | ||||
| func (j JsonObj) UnmarshalToModel(v string) (JsonObj, error) { | ||||
| 	var mrsh JsonObj | ||||
| 	if err := json.Unmarshal([]byte(v), &mrsh); err != nil { | ||||
| 		return JsonObj{}, err | ||||
| 	} | ||||
| 	return mrsh, nil | ||||
| } | ||||
|  | ||||
| type JsonArr []any | ||||
|  | ||||
| func (j JsonArr) MarshalToDB(v JsonArr) (string, error) { | ||||
| 	mrsh, err := json.Marshal(v) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
| 	return string(mrsh), nil | ||||
| } | ||||
|  | ||||
| func (j JsonArr) UnmarshalToModel(v string) (JsonArr, error) { | ||||
| 	var mrsh JsonArr | ||||
| 	if err := json.Unmarshal([]byte(v), &mrsh); err != nil { | ||||
| 		return JsonArr{}, err | ||||
| 	} | ||||
| 	return mrsh, nil | ||||
| } | ||||
|  | ||||
| type AutoJson[T any] struct { | ||||
| 	Value T | ||||
| } | ||||
|  | ||||
| func (j AutoJson[T]) MarshalToDB(v AutoJson[T]) (string, error) { | ||||
| 	mrsh, err := json.Marshal(v.Value) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
| 	return string(mrsh), nil | ||||
| } | ||||
|  | ||||
| func (j AutoJson[T]) UnmarshalToModel(v string) (AutoJson[T], error) { | ||||
| 	mrsh := *new(T) | ||||
| 	if err := json.Unmarshal([]byte(v), &mrsh); err != nil { | ||||
| 		return AutoJson[T]{}, err | ||||
| 	} | ||||
| 	return AutoJson[T]{Value: mrsh}, nil | ||||
| } | ||||
|   | ||||
							
								
								
									
										48
									
								
								sq/list.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										48
									
								
								sq/list.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,48 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| ) | ||||
|  | ||||
// Iterate streams all rows of `table` that match `filter` (page/limit-windowed
// like Paginate) into `consumer`, without materializing the full result slice.
// It returns the number of rows passed to the consumer.
// A nil filter is treated as "match everything".
func Iterate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int, consumer func(ctx context.Context, v TData) error) (int, error) {
	if filter == nil {
		filter = NewEmptyPaginateFilter()
	}

	prepParams := PP{}

	// build the ORDER BY clause from the filter's sort specification
	sortOrder := filter.Sort()
	sortCond := ""
	if len(sortOrder) > 0 {
		sortCond = "ORDER BY "
		for i, v := range sortOrder {
			if i > 0 {
				sortCond += ", "
			}
			sortCond += v.Field + " " + string(v.Direction)
		}
	}

	// build the LIMIT/OFFSET clause; page is 1-based (offset = limit*(page-1))
	// NOTE(review): LIMIT is `*limit+1` (mirroring Paginate's look-ahead row), but unlike
	// Paginate nothing truncates the extra row here, so the consumer may receive up to
	// limit+1 entries per page - confirm this is intended.
	pageCond := ""
	if limit != nil {
		pageCond += fmt.Sprintf("LIMIT :%s OFFSET :%s", prepParams.Add(*limit+1), prepParams.Add(*limit*(page-1)))
	}

	filterCond, joinCond, joinTables := filter.SQL(prepParams)

	// select all columns of the base table plus every joined table
	selectCond := table + ".*"
	for _, v := range joinTables {
		selectCond += ", " + v + ".*"
	}

	sqlQueryData := "SELECT " + selectCond + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " ) " + sortCond + " " + pageCond

	rows, err := q.Query(ctx, sqlQueryData, prepParams)
	if err != nil {
		return 0, exerr.Wrap(err, "failed to list paginated entries from DB").Str("table", table).Any("filter", filter).Int("page", page).Any("limit", limit).Build()
	}

	// rows are closed by IterateAll (close=true)
	return IterateAll[TData](ctx, q, rows, scanMode, scanSec, true, consumer)
}
							
								
								
									
										169
									
								
								sq/listener.go
									
									
									
									
									
								
							
							
						
						
									
										169
									
								
								sq/listener.go
									
									
									
									
									
								
							| @@ -17,3 +17,172 @@ type Listener interface { | ||||
| 	PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP) | ||||
| 	PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP) | ||||
| } | ||||
|  | ||||
| type genListener struct { | ||||
| 	prePing        func(ctx context.Context) error | ||||
| 	preTxBegin     func(ctx context.Context, txid uint16) error | ||||
| 	preTxCommit    func(txid uint16) error | ||||
| 	preTxRollback  func(txid uint16) error | ||||
| 	preQuery       func(ctx context.Context, txID *uint16, sql *string, params *PP) error | ||||
| 	preExec        func(ctx context.Context, txID *uint16, sql *string, params *PP) error | ||||
| 	postPing       func(result error) | ||||
| 	postTxBegin    func(txid uint16, result error) | ||||
| 	postTxCommit   func(txid uint16, result error) | ||||
| 	postTxRollback func(txid uint16, result error) | ||||
| 	postQuery      func(txID *uint16, sqlOriginal string, sqlReal string, params PP) | ||||
| 	postExec       func(txID *uint16, sqlOriginal string, sqlReal string, params PP) | ||||
| } | ||||
|  | ||||
| func (g genListener) PrePing(ctx context.Context) error { | ||||
| 	if g.prePing != nil { | ||||
| 		return g.prePing(ctx) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreTxBegin(ctx context.Context, txid uint16) error { | ||||
| 	if g.preTxBegin != nil { | ||||
| 		return g.preTxBegin(ctx, txid) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreTxCommit(txid uint16) error { | ||||
| 	if g.preTxCommit != nil { | ||||
| 		return g.preTxCommit(txid) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreTxRollback(txid uint16) error { | ||||
| 	if g.preTxRollback != nil { | ||||
| 		return g.preTxRollback(txid) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP) error { | ||||
| 	if g.preQuery != nil { | ||||
| 		return g.preQuery(ctx, txID, sql, params) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreExec(ctx context.Context, txID *uint16, sql *string, params *PP) error { | ||||
| 	if g.preExec != nil { | ||||
| 		return g.preExec(ctx, txID, sql, params) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostPing(result error) { | ||||
| 	if g.postPing != nil { | ||||
| 		g.postPing(result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostTxBegin(txid uint16, result error) { | ||||
| 	if g.postTxBegin != nil { | ||||
| 		g.postTxBegin(txid, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostTxCommit(txid uint16, result error) { | ||||
| 	if g.postTxCommit != nil { | ||||
| 		g.postTxCommit(txid, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostTxRollback(txid uint16, result error) { | ||||
| 	if g.postTxRollback != nil { | ||||
| 		g.postTxRollback(txid, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP) { | ||||
| 	if g.postQuery != nil { | ||||
| 		g.postQuery(txID, sqlOriginal, sqlReal, params) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP) { | ||||
| 	if g.postExec != nil { | ||||
| 		g.postExec(txID, sqlOriginal, sqlReal, params) | ||||
| 	} | ||||
| } | ||||
|  | ||||
// NewPrePingListener creates a Listener that only reacts to PrePing.
func NewPrePingListener(f func(ctx context.Context) error) Listener {
	return genListener{prePing: f}
}

// NewPreTxBeginListener creates a Listener that only reacts to PreTxBegin.
func NewPreTxBeginListener(f func(ctx context.Context, txid uint16) error) Listener {
	return genListener{preTxBegin: f}
}

// NewPreTxCommitListener creates a Listener that only reacts to PreTxCommit.
func NewPreTxCommitListener(f func(txid uint16) error) Listener {
	return genListener{preTxCommit: f}
}

// NewPreTxRollbackListener creates a Listener that only reacts to PreTxRollback.
func NewPreTxRollbackListener(f func(txid uint16) error) Listener {
	return genListener{preTxRollback: f}
}

// NewPreQueryListener creates a Listener that only reacts to PreQuery.
func NewPreQueryListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP) error) Listener {
	return genListener{preQuery: f}
}

// NewPreExecListener creates a Listener that only reacts to PreExec.
func NewPreExecListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP) error) Listener {
	return genListener{preExec: f}
}

// NewPreListener creates a Listener whose callback runs before every query
// and exec; cmdtype is "QUERY" or "EXEC" respectively.
func NewPreListener(f func(ctx context.Context, cmdtype string, txID *uint16, sql *string, params *PP) error) Listener {
	return genListener{
		preExec: func(ctx context.Context, txID *uint16, sql *string, params *PP) error {
			return f(ctx, "EXEC", txID, sql, params)
		},
		preQuery: func(ctx context.Context, txID *uint16, sql *string, params *PP) error {
			return f(ctx, "QUERY", txID, sql, params)
		},
	}
}

// NewPostPingListener creates a Listener that only reacts to PostPing.
func NewPostPingListener(f func(result error)) Listener {
	return genListener{postPing: f}
}

// NewPostTxBeginListener creates a Listener that only reacts to PostTxBegin.
func NewPostTxBeginListener(f func(txid uint16, result error)) Listener {
	return genListener{postTxBegin: f}
}

// NewPostTxCommitListener creates a Listener that only reacts to PostTxCommit.
func NewPostTxCommitListener(f func(txid uint16, result error)) Listener {
	return genListener{postTxCommit: f}
}

// NewPostTxRollbackListener creates a Listener that only reacts to PostTxRollback.
func NewPostTxRollbackListener(f func(txid uint16, result error)) Listener {
	return genListener{postTxRollback: f}
}

// NewPostQueryListener creates a Listener that only reacts to PostQuery.
func NewPostQueryListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener {
	return genListener{postQuery: f}
}

// NewPostExecListener creates a Listener that only reacts to PostExec.
func NewPostExecListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener {
	return genListener{postExec: f}
}

// NewPostListener creates a Listener whose callback runs after every query
// and exec; cmdtype is "QUERY" or "EXEC" respectively.
func NewPostListener(f func(cmdtype string, txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener {
	return genListener{
		postExec: func(txID *uint16, sqlOriginal string, sqlReal string, params PP) {
			f("EXEC", txID, sqlOriginal, sqlReal, params)
		},
		postQuery: func(txID *uint16, sqlOriginal string, sqlReal string, params PP) {
			f("QUERY", txID, sqlOriginal, sqlReal, params)
		},
	}
}
|   | ||||
| @@ -9,6 +9,10 @@ import ( | ||||
| ) | ||||
|  | ||||
| func Paginate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||
| 	if filter == nil { | ||||
| 		filter = NewEmptyPaginateFilter() | ||||
| 	} | ||||
|  | ||||
| 	prepParams := PP{} | ||||
|  | ||||
| 	sortOrder := filter.Sort() | ||||
| @@ -90,6 +94,10 @@ func Paginate[TData any](ctx context.Context, q Queryable, table string, filter | ||||
| } | ||||
|  | ||||
| func Count(ctx context.Context, q Queryable, table string, filter PaginateFilter) (int, error) { | ||||
| 	if filter == nil { | ||||
| 		filter = NewEmptyPaginateFilter() | ||||
| 	} | ||||
|  | ||||
| 	prepParams := PP{} | ||||
|  | ||||
| 	filterCond, joinCond, _ := filter.SQL(prepParams) | ||||
|   | ||||
| @@ -333,3 +333,79 @@ func ScanAll[TData any](ctx context.Context, q Queryable, rows *sqlx.Rows, mode | ||||
| 	} | ||||
| 	return res, nil | ||||
| } | ||||
|  | ||||
// IterateAll scans every row of `rows` into TData and feeds each value to
// `consumer`, returning the number of rows successfully consumed.
// `mode` selects the scan strategy (SModeFast / SModeExtended) and `sec`
// selects the scanner safety level (Safe / Unsafe).
// If `close` is true the rows are closed after a full iteration.
func IterateAll[TData any](ctx context.Context, q Queryable, rows *sqlx.Rows, mode StructScanMode, sec StructScanSafety, close bool, consumer func(ctx context.Context, v TData) error) (int, error) {
	var strscan *StructScanner

	// initialize the struct-scanner; NOTE(review): the second NewStructScanner
	// argument presumably toggles unsafe/lax column matching - confirm.
	if sec == Safe {
		strscan = NewStructScanner(rows, false)
		var data TData
		err := strscan.Start(&data)
		if err != nil {
			return 0, err
		}
	} else if sec == Unsafe {
		strscan = NewStructScanner(rows, true)
		var data TData
		err := strscan.Start(&data)
		if err != nil {
			return 0, err
		}
	} else {
		return 0, errors.New("unknown value for <sec>")
	}

	// rcount counts rows that were successfully scanned AND consumed
	rcount := 0

	for rows.Next() {

		// abort if the context was cancelled mid-iteration
		if err := ctx.Err(); err != nil {
			return rcount, err
		}

		if mode == SModeFast {
			var data TData
			err := strscan.StructScanBase(&data)
			if err != nil {
				return rcount, err
			}

			// consumer errors abort the iteration and are passed to the caller
			err = consumer(ctx, data)
			if err != nil {
				return rcount, exerr.Wrap(err, "").Build()
			}

			rcount++

		} else if mode == SModeExtended {
			var data TData
			err := strscan.StructScanExt(q, &data)
			if err != nil {
				return rcount, err
			}

			err = consumer(ctx, data)
			if err != nil {
				return rcount, exerr.Wrap(err, "").Build()
			}

			rcount++

		} else {
			return rcount, errors.New("unknown value for <mode>")
		}
	}

	if close {
		err := strscan.rows.Close()
		if err != nil {
			return rcount, err
		}
	}

	// surface any deferred iteration error from the driver
	if err := rows.Err(); err != nil {
		return rcount, err
	}

	return rcount, nil
}
|   | ||||
| @@ -36,8 +36,7 @@ func TestInsertSingle(t *testing.T) { | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db.RegisterDefaultConverter() | ||||
| 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||
|  | ||||
| 	_, err := db.Exec(ctx, ` | ||||
| 		CREATE TABLE requests (  | ||||
| @@ -90,8 +89,7 @@ func TestUpdateSingle(t *testing.T) { | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db.RegisterDefaultConverter() | ||||
| 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||
|  | ||||
| 	_, err := db.Exec(ctx, ` | ||||
| 		CREATE TABLE requests (  | ||||
| @@ -176,8 +174,7 @@ func TestInsertMultiple(t *testing.T) { | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db.RegisterDefaultConverter() | ||||
| 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||
|  | ||||
| 	_, err := db.Exec(ctx, ` | ||||
| 		CREATE TABLE requests (  | ||||
|   | ||||
| @@ -36,8 +36,7 @@ func TestTypeConverter1(t *testing.T) { | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db.RegisterDefaultConverter() | ||||
| 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||
|  | ||||
| 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
| @@ -71,8 +70,7 @@ func TestTypeConverter2(t *testing.T) { | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db.RegisterDefaultConverter() | ||||
| 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||
|  | ||||
| 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
| @@ -116,8 +114,7 @@ func TestTypeConverter3(t *testing.T) { | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db.RegisterDefaultConverter() | ||||
| 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||
|  | ||||
| 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|   | ||||
		Reference in New Issue
	
	Block a user