Compare commits
	
		
			23 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 4a33986b6a | |||
| c1c8c64c76 | |||
| 0927fdc4d7 | |||
| 102a280dda | |||
| f13384d794 | |||
| 409d6e108d | |||
| ed53f297bd | |||
| 42424f4bc2 | |||
| 9e5b8c5277 | |||
| 9abe28c490 | |||
| 422bbd8593 | |||
| 3956675e04 | |||
| 10c3780b52 | |||
| 8edc067a3b | |||
| 1007f2c834 | |||
| c25da03217 | |||
| 4b55dbaacf | |||
| c399fa42ae | |||
| 9e586f7706 | |||
| 3cc8dccc63 | |||
| 7fedfbca81 | |||
| 3c439ba428 | |||
| ad24f6db44 | 
							
								
								
									
										2
									
								
								.idea/.gitignore
									
									
									
										generated
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.idea/.gitignore
									
									
									
										generated
									
									
										vendored
									
									
								
							| @@ -6,3 +6,5 @@ | |||||||
| # Datasource local storage ignored files | # Datasource local storage ignored files | ||||||
| /dataSources/ | /dataSources/ | ||||||
| /dataSources.local.xml | /dataSources.local.xml | ||||||
|  | # GitHub Copilot persisted chat sessions | ||||||
|  | /copilot/chatSessions | ||||||
|   | |||||||
| @@ -30,7 +30,7 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"` | |||||||
| | confext     | Mike       | Parses environment configuration into structs                                                                 | | | confext     | Mike       | Parses environment configuration into structs                                                                 | | ||||||
| | cmdext      | Mike       | Runner for external commands/processes                                                                        | | | cmdext      | Mike       | Runner for external commands/processes                                                                        | | ||||||
| |             |            |                                                                                                               | | |             |            |                                                                                                               | | ||||||
| | sq          | Mike       | Utility functions for sql based databases                                                                     | | | sq          | Mike       | Utility functions for sql based databases (primarily sqlite)                                                  | | ||||||
| | tst         | Mike       | Utility functions for unit tests                                                                              | | | tst         | Mike       | Utility functions for unit tests                                                                              | | ||||||
| |             |            |                                                                                                               | | |             |            |                                                                                                               | | ||||||
| | rfctime     | Mike       | Classes for time seriallization, with different marshallign method for mongo and json                         | | | rfctime     | Mike       | Classes for time seriallization, with different marshallign method for mongo and json                         | | ||||||
|   | |||||||
							
								
								
									
										4
									
								
								TODO.md
									
									
									
									
									
								
							
							
						
						
									
										4
									
								
								TODO.md
									
									
									
									
									
								
							| @@ -2,6 +2,8 @@ | |||||||
|  |  | ||||||
|  - cronext |  - cronext | ||||||
|  |  | ||||||
|  - rfctime.DateOnly |  | ||||||
|  - rfctime.HMSTimeOnly |  - rfctime.HMSTimeOnly | ||||||
|  - rfctime.NanoTimeOnly |  - rfctime.NanoTimeOnly | ||||||
|  |  | ||||||
|  |  - remove sqlx dependency from sq  (unmaintained, and mostly superseeded by our own stuff?) | ||||||
|  |  - Move DBLogger and DBPreprocessor to sq | ||||||
| @@ -26,6 +26,10 @@ type CSIDDef struct { | |||||||
| 	Prefix       string | 	Prefix       string | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type CSIDGenOptions struct { | ||||||
|  | 	DebugOutput *bool | ||||||
|  | } | ||||||
|  |  | ||||||
| var rexCSIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | var rexCSIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||||
|  |  | ||||||
| var rexCSIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@csid:type)\s+\[(?P<prefix>[A-Z0-9]{3})].*$`)) | var rexCSIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@csid:type)\s+\[(?P<prefix>[A-Z0-9]{3})].*$`)) | ||||||
| @@ -35,7 +39,9 @@ var rexCSIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumCharsetIDGen | |||||||
| //go:embed csid-generate.template | //go:embed csid-generate.template | ||||||
| var templateCSIDGenerateText string | var templateCSIDGenerateText string | ||||||
|  |  | ||||||
| func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | func GenerateCharsetIDSpecs(sourceDir string, destFile string, opt CSIDGenOptions) error { | ||||||
|  |  | ||||||
|  | 	debugOutput := langext.Coalesce(opt.DebugOutput, false) | ||||||
|  |  | ||||||
| 	files, err := os.ReadDir(sourceDir) | 	files, err := os.ReadDir(sourceDir) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -81,13 +87,18 @@ func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | |||||||
| 	pkgname := "" | 	pkgname := "" | ||||||
|  |  | ||||||
| 	for _, f := range files { | 	for _, f := range files { | ||||||
|  | 		if debugOutput { | ||||||
| 			fmt.Printf("========= %s =========\n\n", f.Name()) | 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||||
| 		fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name())) | 		} | ||||||
|  |  | ||||||
|  | 		fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return err | 			return err | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|  | 		if debugOutput { | ||||||
| 			fmt.Printf("\n") | 			fmt.Printf("\n") | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		allIDs = append(allIDs, fileIDs...) | 		allIDs = append(allIDs, fileIDs...) | ||||||
|  |  | ||||||
| @@ -113,7 +124,7 @@ func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) { | func processCSIDFile(basedir string, fn string, debugOutput bool) ([]CSIDDef, string, error) { | ||||||
| 	file, err := os.Open(fn) | 	file, err := os.Open(fn) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, "", err | 		return nil, "", err | ||||||
| @@ -155,7 +166,11 @@ func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) { | |||||||
| 				Name:         match.GroupByName("name").Value(), | 				Name:         match.GroupByName("name").Value(), | ||||||
| 				Prefix:       match.GroupByName("prefix").Value(), | 				Prefix:       match.GroupByName("prefix").Value(), | ||||||
| 			} | 			} | ||||||
|  |  | ||||||
|  | 			if debugOutput { | ||||||
| 				fmt.Printf("Found ID definition { '%s' }\n", def.Name) | 				fmt.Printf("Found ID definition { '%s' }\n", def.Name) | ||||||
|  | 			} | ||||||
|  |  | ||||||
| 			ids = append(ids, def) | 			ids = append(ids, def) | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|   | |||||||
| @@ -34,10 +34,10 @@ func TestGenerateCSIDSpecs(t *testing.T) { | |||||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go") | 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go", CSIDGenOptions{DebugOutput: langext.PTrue}) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go") | 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go", CSIDGenOptions{DebugOutput: langext.PTrue}) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	fmt.Println() | 	fmt.Println() | ||||||
|   | |||||||
| @@ -37,6 +37,11 @@ type EnumDef struct { | |||||||
| 	Values       []EnumDefVal | 	Values       []EnumDefVal | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type EnumGenOptions struct { | ||||||
|  | 	DebugOutput *bool | ||||||
|  | 	GoFormat    *bool | ||||||
|  | } | ||||||
|  |  | ||||||
| var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||||
|  |  | ||||||
| var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | ||||||
| @@ -48,7 +53,7 @@ var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerato | |||||||
| //go:embed enum-generate.template | //go:embed enum-generate.template | ||||||
| var templateEnumGenerateText string | var templateEnumGenerateText string | ||||||
|  |  | ||||||
| func GenerateEnumSpecs(sourceDir string, destFile string) error { | func GenerateEnumSpecs(sourceDir string, destFile string, opt EnumGenOptions) error { | ||||||
|  |  | ||||||
| 	oldChecksum := "N/A" | 	oldChecksum := "N/A" | ||||||
| 	if _, err := os.Stat(destFile); !os.IsNotExist(err) { | 	if _, err := os.Stat(destFile); !os.IsNotExist(err) { | ||||||
| @@ -61,7 +66,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	gocode, _, changed, err := _generateEnumSpecs(sourceDir, destFile, oldChecksum, true) | 	gocode, _, changed, err := _generateEnumSpecs(sourceDir, destFile, oldChecksum, langext.Coalesce(opt.GoFormat, true), langext.Coalesce(opt.DebugOutput, false)) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
| @@ -78,7 +83,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, gofmt bool) (string, string, bool, error) { | func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, gofmt bool, debugOutput bool) (string, string, bool, error) { | ||||||
|  |  | ||||||
| 	files, err := os.ReadDir(sourceDir) | 	files, err := os.ReadDir(sourceDir) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -113,13 +118,18 @@ func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, g | |||||||
| 	pkgname := "" | 	pkgname := "" | ||||||
|  |  | ||||||
| 	for _, f := range files { | 	for _, f := range files { | ||||||
|  | 		if debugOutput { | ||||||
| 			fmt.Printf("========= %s =========\n\n", f.Name()) | 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||||
| 		fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name())) | 		} | ||||||
|  |  | ||||||
|  | 		fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return "", "", false, err | 			return "", "", false, err | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|  | 		if debugOutput { | ||||||
| 			fmt.Printf("\n") | 			fmt.Printf("\n") | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		allEnums = append(allEnums, fileEnums...) | 		allEnums = append(allEnums, fileEnums...) | ||||||
|  |  | ||||||
| @@ -146,7 +156,7 @@ func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, g | |||||||
| 	return string(fdata), newChecksum, true, nil | 	return string(fdata), newChecksum, true, nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | func processEnumFile(basedir string, fn string, debugOutput bool) ([]EnumDef, string, error) { | ||||||
| 	file, err := os.Open(fn) | 	file, err := os.Open(fn) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, "", err | 		return nil, "", err | ||||||
| @@ -190,8 +200,11 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | |||||||
| 				Values:       make([]EnumDefVal, 0), | 				Values:       make([]EnumDefVal, 0), | ||||||
| 			} | 			} | ||||||
| 			enums = append(enums, def) | 			enums = append(enums, def) | ||||||
|  |  | ||||||
|  | 			if debugOutput { | ||||||
| 				fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type) | 				fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type) | ||||||
| 			} | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		if match, ok := rexEnumValueDef.MatchFirst(line); ok { | 		if match, ok := rexEnumValueDef.MatchFirst(line); ok { | ||||||
| 			typename := match.GroupByName("type").Value() | 			typename := match.GroupByName("type").Value() | ||||||
| @@ -230,19 +243,24 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | |||||||
| 				if v.EnumTypeName == typename { | 				if v.EnumTypeName == typename { | ||||||
| 					enums[i].Values = append(enums[i].Values, def) | 					enums[i].Values = append(enums[i].Values, def) | ||||||
| 					found = true | 					found = true | ||||||
|  |  | ||||||
|  | 					if debugOutput { | ||||||
| 						if def.Description != nil { | 						if def.Description != nil { | ||||||
| 							fmt.Printf("Found enum value [%s] for '%s'  ('%s')\n", def.Value, def.VarName, *def.Description) | 							fmt.Printf("Found enum value [%s] for '%s'  ('%s')\n", def.Value, def.VarName, *def.Description) | ||||||
| 						} else { | 						} else { | ||||||
| 							fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName) | 							fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName) | ||||||
| 						} | 						} | ||||||
|  | 					} | ||||||
| 					break | 					break | ||||||
| 				} | 				} | ||||||
| 			} | 			} | ||||||
| 			if !found { | 			if !found { | ||||||
|  | 				if debugOutput { | ||||||
| 					fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName) | 					fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName) | ||||||
| 				} | 				} | ||||||
| 			} | 			} | ||||||
| 		} | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	return enums, pkgname, nil | 	return enums, pkgname, nil | ||||||
| } | } | ||||||
|   | |||||||
| @@ -7,6 +7,8 @@ import "gogs.mikescher.com/BlackForestBytes/goext/enums" | |||||||
|  |  | ||||||
| const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | ||||||
|  |  | ||||||
|  | {{ $pkgname  := .PkgName }} | ||||||
|  |  | ||||||
| {{range .Enums}} | {{range .Enums}} | ||||||
|  |  | ||||||
| {{ $hasStr   := ( . | hasStr   ) }} | {{ $hasStr   := ( . | hasStr   ) }} | ||||||
| @@ -97,6 +99,14 @@ func (e {{.EnumTypeName}}) VarName() string { | |||||||
| 	return "" | 	return "" | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) TypeName() string { | ||||||
|  | 	return "{{$typename}}" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) PackageName() string { | ||||||
|  | 	return "{{$pkgname }}" | ||||||
|  | } | ||||||
|  |  | ||||||
| func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue { | func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue { | ||||||
|     {{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}} |     {{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}} | ||||||
| } | } | ||||||
| @@ -135,3 +145,11 @@ func {{.EnumTypeName}}ValuesDescriptionMeta() []enums.EnumDescriptionMetaValue { | |||||||
| {{end}} | {{end}} | ||||||
|  |  | ||||||
| {{end}} | {{end}} | ||||||
|  |  | ||||||
|  | // ================================ ================= ================================ | ||||||
|  |  | ||||||
|  | func AllPackageEnums() []enums.Enum { | ||||||
|  |     return []enums.Enum{ {{range .Enums}} | ||||||
|  |         {{ if gt (len .Values) 0 }} {{  $v := index .Values 0 }} {{ $v.VarName}}, {{end}} // {{ .EnumTypeName }} {{end}} | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -37,10 +37,10 @@ func TestGenerateEnumSpecs(t *testing.T) { | |||||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	s1, cs1, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true) | 	s1, cs1, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true, true) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	s2, cs2, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true) | 	s2, cs2, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true, true) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	tst.AssertEqual(t, cs1, cs2) | 	tst.AssertEqual(t, cs1, cs2) | ||||||
| @@ -76,7 +76,7 @@ func TestGenerateEnumSpecsData(t *testing.T) { | |||||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	s1, _, _, err := _generateEnumSpecs(tmpDir, "", "", true) | 	s1, _, _, err := _generateEnumSpecs(tmpDir, "", "", true, true) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	fmt.Println() | 	fmt.Println() | ||||||
|   | |||||||
| @@ -25,6 +25,10 @@ type IDDef struct { | |||||||
| 	Name         string | 	Name         string | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type IDGenOptions struct { | ||||||
|  | 	DebugOutput *bool | ||||||
|  | } | ||||||
|  |  | ||||||
| var rexIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | var rexIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||||
|  |  | ||||||
| var rexIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@id:type).*$`)) | var rexIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@id:type).*$`)) | ||||||
| @@ -34,7 +38,9 @@ var rexIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumIDGenerator = | |||||||
| //go:embed id-generate.template | //go:embed id-generate.template | ||||||
| var templateIDGenerateText string | var templateIDGenerateText string | ||||||
|  |  | ||||||
| func GenerateIDSpecs(sourceDir string, destFile string) error { | func GenerateIDSpecs(sourceDir string, destFile string, opt *IDGenOptions) error { | ||||||
|  |  | ||||||
|  | 	debugOutput := langext.Coalesce(opt.DebugOutput, false) | ||||||
|  |  | ||||||
| 	files, err := os.ReadDir(sourceDir) | 	files, err := os.ReadDir(sourceDir) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -80,13 +86,18 @@ func GenerateIDSpecs(sourceDir string, destFile string) error { | |||||||
| 	pkgname := "" | 	pkgname := "" | ||||||
|  |  | ||||||
| 	for _, f := range files { | 	for _, f := range files { | ||||||
|  | 		if debugOutput { | ||||||
| 			fmt.Printf("========= %s =========\n\n", f.Name()) | 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||||
| 		fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name())) | 		} | ||||||
|  |  | ||||||
|  | 		fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return err | 			return err | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|  | 		if debugOutput { | ||||||
| 			fmt.Printf("\n") | 			fmt.Printf("\n") | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		allIDs = append(allIDs, fileIDs...) | 		allIDs = append(allIDs, fileIDs...) | ||||||
|  |  | ||||||
| @@ -112,7 +123,7 @@ func GenerateIDSpecs(sourceDir string, destFile string) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func processIDFile(basedir string, fn string) ([]IDDef, string, error) { | func processIDFile(basedir string, fn string, debugOutput bool) ([]IDDef, string, error) { | ||||||
| 	file, err := os.Open(fn) | 	file, err := os.Open(fn) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, "", err | 		return nil, "", err | ||||||
| @@ -153,7 +164,11 @@ func processIDFile(basedir string, fn string) ([]IDDef, string, error) { | |||||||
| 				FileRelative: rfp, | 				FileRelative: rfp, | ||||||
| 				Name:         match.GroupByName("name").Value(), | 				Name:         match.GroupByName("name").Value(), | ||||||
| 			} | 			} | ||||||
|  |  | ||||||
|  | 			if debugOutput { | ||||||
| 				fmt.Printf("Found ID definition { '%s' }\n", def.Name) | 				fmt.Printf("Found ID definition { '%s' }\n", def.Name) | ||||||
|  | 			} | ||||||
|  |  | ||||||
| 			ids = append(ids, def) | 			ids = append(ids, def) | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|   | |||||||
| @@ -34,10 +34,10 @@ func TestGenerateIDSpecs(t *testing.T) { | |||||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go") | 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go", &IDGenOptions{DebugOutput: langext.PTrue}) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go") | 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go", &IDGenOptions{DebugOutput: langext.PTrue}) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	fmt.Println() | 	fmt.Println() | ||||||
|   | |||||||
| @@ -5,6 +5,8 @@ type Enum interface { | |||||||
| 	ValuesAny() []any | 	ValuesAny() []any | ||||||
| 	ValuesMeta() []EnumMetaValue | 	ValuesMeta() []EnumMetaValue | ||||||
| 	VarName() string | 	VarName() string | ||||||
|  | 	TypeName() string | ||||||
|  | 	PackageName() string | ||||||
| } | } | ||||||
|  |  | ||||||
| type StringEnum interface { | type StringEnum interface { | ||||||
|   | |||||||
| @@ -53,15 +53,11 @@ func (w *GinRoutesWrapper) Group(relativePath string) *GinRoutesWrapper { | |||||||
| func (w *GinRoutesWrapper) Use(middleware ...gin.HandlerFunc) *GinRoutesWrapper { | func (w *GinRoutesWrapper) Use(middleware ...gin.HandlerFunc) *GinRoutesWrapper { | ||||||
| 	defHandler := langext.ArrCopy(w.defaultHandler) | 	defHandler := langext.ArrCopy(w.defaultHandler) | ||||||
| 	defHandler = append(defHandler, middleware...) | 	defHandler = append(defHandler, middleware...) | ||||||
| 	return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler} | 	return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler, absPath: w.absPath} | ||||||
| } | } | ||||||
|  |  | ||||||
| func (w *GinRoutesWrapper) WithJSONFilter(filter string) *GinRoutesWrapper { | func (w *GinRoutesWrapper) WithJSONFilter(filter string) *GinRoutesWrapper { | ||||||
| 	defHandler := langext.ArrCopy(w.defaultHandler) | 	return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) }) | ||||||
| 	defHandler = append(defHandler, func(g *gin.Context) { |  | ||||||
| 		g.Set("goext.jsonfilter", filter) |  | ||||||
| 	}) |  | ||||||
| 	return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler} |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func (w *GinRoutesWrapper) GET(relativePath string) *GinRouteBuilder { | func (w *GinRoutesWrapper) GET(relativePath string) *GinRouteBuilder { | ||||||
| @@ -116,10 +112,7 @@ func (w *GinRouteBuilder) Use(middleware ...gin.HandlerFunc) *GinRouteBuilder { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (w *GinRouteBuilder) WithJSONFilter(filter string) *GinRouteBuilder { | func (w *GinRouteBuilder) WithJSONFilter(filter string) *GinRouteBuilder { | ||||||
| 	w.handlers = append(w.handlers, func(g *gin.Context) { | 	return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) }) | ||||||
| 		g.Set("goext.jsonfilter", filter) |  | ||||||
| 	}) |  | ||||||
| 	return w |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func (w *GinRouteBuilder) Handle(handler WHandlerFunc) { | func (w *GinRouteBuilder) Handle(handler WHandlerFunc) { | ||||||
|   | |||||||
							
								
								
									
										20
									
								
								go.mod
									
									
									
									
									
								
							
							
						
						
									
										20
									
								
								go.mod
									
									
									
									
									
								
							| @@ -8,14 +8,14 @@ require ( | |||||||
| 	github.com/jmoiron/sqlx v1.3.5 | 	github.com/jmoiron/sqlx v1.3.5 | ||||||
| 	github.com/rs/xid v1.5.0 | 	github.com/rs/xid v1.5.0 | ||||||
| 	github.com/rs/zerolog v1.32.0 | 	github.com/rs/zerolog v1.32.0 | ||||||
| 	go.mongodb.org/mongo-driver v1.13.1 | 	go.mongodb.org/mongo-driver v1.14.0 | ||||||
| 	golang.org/x/crypto v0.19.0 | 	golang.org/x/crypto v0.21.0 | ||||||
| 	golang.org/x/sys v0.17.0 | 	golang.org/x/sys v0.18.0 | ||||||
| 	golang.org/x/term v0.17.0 | 	golang.org/x/term v0.18.0 | ||||||
| ) | ) | ||||||
|  |  | ||||||
| require ( | require ( | ||||||
| 	github.com/bytedance/sonic v1.10.2 // indirect | 	github.com/bytedance/sonic v1.11.2 // indirect | ||||||
| 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect | 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect | ||||||
| 	github.com/chenzhuoyu/iasm v0.9.1 // indirect | 	github.com/chenzhuoyu/iasm v0.9.1 // indirect | ||||||
| 	github.com/dustin/go-humanize v1.0.1 // indirect | 	github.com/dustin/go-humanize v1.0.1 // indirect | ||||||
| @@ -23,13 +23,13 @@ require ( | |||||||
| 	github.com/gin-contrib/sse v0.1.0 // indirect | 	github.com/gin-contrib/sse v0.1.0 // indirect | ||||||
| 	github.com/go-playground/locales v0.14.1 // indirect | 	github.com/go-playground/locales v0.14.1 // indirect | ||||||
| 	github.com/go-playground/universal-translator v0.18.1 // indirect | 	github.com/go-playground/universal-translator v0.18.1 // indirect | ||||||
| 	github.com/go-playground/validator/v10 v10.17.0 // indirect | 	github.com/go-playground/validator/v10 v10.19.0 // indirect | ||||||
| 	github.com/goccy/go-json v0.10.2 // indirect | 	github.com/goccy/go-json v0.10.2 // indirect | ||||||
| 	github.com/golang/snappy v0.0.4 // indirect | 	github.com/golang/snappy v0.0.4 // indirect | ||||||
| 	github.com/google/uuid v1.5.0 // indirect | 	github.com/google/uuid v1.5.0 // indirect | ||||||
| 	github.com/json-iterator/go v1.1.12 // indirect | 	github.com/json-iterator/go v1.1.12 // indirect | ||||||
| 	github.com/klauspost/compress v1.17.6 // indirect | 	github.com/klauspost/compress v1.17.7 // indirect | ||||||
| 	github.com/klauspost/cpuid/v2 v2.2.6 // indirect | 	github.com/klauspost/cpuid/v2 v2.2.7 // indirect | ||||||
| 	github.com/leodido/go-urn v1.4.0 // indirect | 	github.com/leodido/go-urn v1.4.0 // indirect | ||||||
| 	github.com/mattn/go-colorable v0.1.13 // indirect | 	github.com/mattn/go-colorable v0.1.13 // indirect | ||||||
| 	github.com/mattn/go-isatty v0.0.20 // indirect | 	github.com/mattn/go-isatty v0.0.20 // indirect | ||||||
| @@ -45,10 +45,10 @@ require ( | |||||||
| 	github.com/xdg-go/stringprep v1.0.4 // indirect | 	github.com/xdg-go/stringprep v1.0.4 // indirect | ||||||
| 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect | 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect | ||||||
| 	golang.org/x/arch v0.7.0 // indirect | 	golang.org/x/arch v0.7.0 // indirect | ||||||
| 	golang.org/x/net v0.21.0 // indirect | 	golang.org/x/net v0.22.0 // indirect | ||||||
| 	golang.org/x/sync v0.6.0 // indirect | 	golang.org/x/sync v0.6.0 // indirect | ||||||
| 	golang.org/x/text v0.14.0 // indirect | 	golang.org/x/text v0.14.0 // indirect | ||||||
| 	google.golang.org/protobuf v1.32.0 // indirect | 	google.golang.org/protobuf v1.33.0 // indirect | ||||||
| 	gopkg.in/yaml.v3 v3.0.1 // indirect | 	gopkg.in/yaml.v3 v3.0.1 // indirect | ||||||
| 	modernc.org/libc v1.37.6 // indirect | 	modernc.org/libc v1.37.6 // indirect | ||||||
| 	modernc.org/mathutil v1.6.0 // indirect | 	modernc.org/mathutil v1.6.0 // indirect | ||||||
|   | |||||||
							
								
								
									
										28
									
								
								go.sum
									
									
									
									
									
								
							
							
						
						
									
										28
									
								
								go.sum
									
									
									
									
									
								
							| @@ -2,6 +2,12 @@ github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1 | |||||||
| github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= | github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= | ||||||
| github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE= | github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE= | ||||||
| github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||||
|  | github.com/bytedance/sonic v1.11.0 h1:FwNNv6Vu4z2Onf1++LNzxB/QhitD8wuTdpZzMTGITWo= | ||||||
|  | github.com/bytedance/sonic v1.11.0/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||||
|  | github.com/bytedance/sonic v1.11.1 h1:JC0+6c9FoWYYxakaoa+c5QTtJeiSZNeByOBhXtAFSn4= | ||||||
|  | github.com/bytedance/sonic v1.11.1/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||||
|  | github.com/bytedance/sonic v1.11.2 h1:ywfwo0a/3j9HR8wsYGWsIWl2mvRsI950HyoxiBERw5A= | ||||||
|  | github.com/bytedance/sonic v1.11.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||||
| github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= | github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= | ||||||
| github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= | github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= | ||||||
| github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= | github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= | ||||||
| @@ -33,6 +39,10 @@ github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqR | |||||||
| github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||||
| github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74= | github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74= | ||||||
| github.com/go-playground/validator/v10 v10.17.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | github.com/go-playground/validator/v10 v10.17.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||||
|  | github.com/go-playground/validator/v10 v10.18.0 h1:BvolUXjp4zuvkZ5YN5t7ebzbhlUtPsPm2S9NAZ5nl9U= | ||||||
|  | github.com/go-playground/validator/v10 v10.18.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||||
|  | github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4= | ||||||
|  | github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||||
| github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | ||||||
| github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | ||||||
| github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | ||||||
| @@ -59,9 +69,13 @@ github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW | |||||||
| github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | ||||||
| github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI= | github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI= | ||||||
| github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | ||||||
|  | github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= | ||||||
|  | github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= | ||||||
| github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= | github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||||
|  | github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM= | ||||||
|  | github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||||
| github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= | github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= | ||||||
| github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= | github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= | ||||||
| github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= | github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= | ||||||
| @@ -126,6 +140,8 @@ github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/ | |||||||
| github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | ||||||
| go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk= | go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk= | ||||||
| go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo= | go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo= | ||||||
|  | go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= | ||||||
|  | go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= | ||||||
| golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | ||||||
| golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc= | golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc= | ||||||
| golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||||
| @@ -139,6 +155,10 @@ golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= | |||||||
| golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= | golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= | ||||||
| golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= | golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= | ||||||
| golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= | golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= | ||||||
|  | golang.org/x/crypto v0.20.0 h1:jmAMJJZXr5KiCw05dfYK9QnqaqKLYXijU23lsEdcQqg= | ||||||
|  | golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ= | ||||||
|  | golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= | ||||||
|  | golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= | ||||||
| golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | ||||||
| golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | ||||||
| golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | ||||||
| @@ -151,6 +171,8 @@ golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= | |||||||
| golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= | golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= | ||||||
| golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= | golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= | ||||||
| golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= | golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= | ||||||
|  | golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc= | ||||||
|  | golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= | ||||||
| golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||||
| golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||||
| golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= | golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= | ||||||
| @@ -170,12 +192,16 @@ golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= | |||||||
| golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
| golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= | golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= | ||||||
| golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
|  | golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= | ||||||
|  | golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
| golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | ||||||
| golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | ||||||
| golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE= | golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE= | ||||||
| golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= | golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= | ||||||
| golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= | golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= | ||||||
| golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= | golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= | ||||||
|  | golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8= | ||||||
|  | golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= | ||||||
| golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | ||||||
| golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||||
| golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||||
| @@ -193,6 +219,8 @@ golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSm | |||||||
| golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= | golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= | ||||||
| google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= | google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= | ||||||
| google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||||
|  | google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= | ||||||
|  | google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||||
| gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= | ||||||
| gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | ||||||
| gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| package goext | package goext | ||||||
|  |  | ||||||
| const GoextVersion = "0.0.384" | const GoextVersion = "0.0.407" | ||||||
|  |  | ||||||
| const GoextVersionTimestamp = "2024-02-09T15:20:46+0100" | const GoextVersionTimestamp = "2024-03-11T16:40:41+0100" | ||||||
|   | |||||||
| @@ -265,6 +265,15 @@ func ArrFirstIndex[T comparable](arr []T, needle T) int { | |||||||
| 	return -1 | 	return -1 | ||||||
| } | } | ||||||
|  |  | ||||||
// ArrFirstIndexFunc returns the index of the first element of arr for which
// comp returns true, or -1 if no element matches.
func ArrFirstIndexFunc[T any](arr []T, comp func(v T) bool) int {
	for idx := range arr {
		if comp(arr[idx]) {
			return idx
		}
	}
	return -1
}
|  |  | ||||||
| func ArrLastIndex[T comparable](arr []T, needle T) int { | func ArrLastIndex[T comparable](arr []T, needle T) int { | ||||||
| 	result := -1 | 	result := -1 | ||||||
| 	for i, v := range arr { | 	for i, v := range arr { | ||||||
| @@ -275,6 +284,16 @@ func ArrLastIndex[T comparable](arr []T, needle T) int { | |||||||
| 	return result | 	return result | ||||||
| } | } | ||||||
|  |  | ||||||
// ArrLastIndexFunc returns the index of the last element of arr for which
// comp returns true, or -1 if no element matches.
//
// comp is intentionally invoked on every element (a full forward scan),
// matching the call pattern of ArrLastIndex.
func ArrLastIndexFunc[T any](arr []T, comp func(v T) bool) int {
	lastMatch := -1
	for idx, item := range arr {
		if comp(item) {
			lastMatch = idx
		}
	}
	return lastMatch
}
|  |  | ||||||
| func AddToSet[T comparable](set []T, add T) []T { | func AddToSet[T comparable](set []T, add T) []T { | ||||||
| 	for _, v := range set { | 	for _, v := range set { | ||||||
| 		if v == add { | 		if v == add { | ||||||
|   | |||||||
| @@ -71,3 +71,19 @@ func ForceMap[K comparable, V any](v map[K]V) map[K]V { | |||||||
| 		return v | 		return v | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
// MapMerge returns a new map containing all entries of base plus all entries
// of the maps in arr. Later maps win: if a key occurs in multiple inputs, the
// value from the last map that contains it is used. None of the inputs are
// modified.
//
// BUGFIX: the capacity hint was previously len(base)*(1+len(arr)), which is 0
// whenever base is empty (even if the extra maps are large) and grossly
// over-allocates otherwise. The exact upper bound — the sum of all input
// sizes — is used instead.
func MapMerge[K comparable, V any](base map[K]V, arr ...map[K]V) map[K]V {
	capacity := len(base)
	for _, m := range arr {
		capacity += len(m)
	}

	res := make(map[K]V, capacity)

	for k, v := range base {
		res[k] = v
	}

	for _, m := range arr {
		for k, v := range m {
			res[k] = v
		}
	}

	return res
}
|   | |||||||
| @@ -8,12 +8,28 @@ func Sort[T OrderedConstraint](arr []T) { | |||||||
| 	}) | 	}) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func AsSorted[T OrderedConstraint](arr []T) []T { | ||||||
|  | 	arr = ArrCopy(arr) | ||||||
|  | 	sort.Slice(arr, func(i1, i2 int) bool { | ||||||
|  | 		return arr[i1] < arr[i2] | ||||||
|  | 	}) | ||||||
|  | 	return arr | ||||||
|  | } | ||||||
|  |  | ||||||
| func SortStable[T OrderedConstraint](arr []T) { | func SortStable[T OrderedConstraint](arr []T) { | ||||||
| 	sort.SliceStable(arr, func(i1, i2 int) bool { | 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||||
| 		return arr[i1] < arr[i2] | 		return arr[i1] < arr[i2] | ||||||
| 	}) | 	}) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func AsSortedStable[T OrderedConstraint](arr []T) []T { | ||||||
|  | 	arr = ArrCopy(arr) | ||||||
|  | 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||||
|  | 		return arr[i1] < arr[i2] | ||||||
|  | 	}) | ||||||
|  | 	return arr | ||||||
|  | } | ||||||
|  |  | ||||||
| func IsSorted[T OrderedConstraint](arr []T) bool { | func IsSorted[T OrderedConstraint](arr []T) bool { | ||||||
| 	return sort.SliceIsSorted(arr, func(i1, i2 int) bool { | 	return sort.SliceIsSorted(arr, func(i1, i2 int) bool { | ||||||
| 		return arr[i1] < arr[i2] | 		return arr[i1] < arr[i2] | ||||||
| @@ -26,12 +42,28 @@ func SortSlice[T any](arr []T, less func(v1, v2 T) bool) { | |||||||
| 	}) | 	}) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func AsSortedSlice[T any](arr []T, less func(v1, v2 T) bool) []T { | ||||||
|  | 	arr = ArrCopy(arr) | ||||||
|  | 	sort.Slice(arr, func(i1, i2 int) bool { | ||||||
|  | 		return less(arr[i1], arr[i2]) | ||||||
|  | 	}) | ||||||
|  | 	return arr | ||||||
|  | } | ||||||
|  |  | ||||||
// SortSliceStable sorts arr in place according to the supplied less function,
// preserving the relative order of equal elements.
func SortSliceStable[T any](arr []T, less func(v1, v2 T) bool) {
	cmp := func(a, b int) bool { return less(arr[a], arr[b]) }
	sort.SliceStable(arr, cmp)
}
|  |  | ||||||
|  | func AsSortedSliceStable[T any](arr []T, less func(v1, v2 T) bool) []T { | ||||||
|  | 	arr = ArrCopy(arr) | ||||||
|  | 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||||
|  | 		return less(arr[i1], arr[i2]) | ||||||
|  | 	}) | ||||||
|  | 	return arr | ||||||
|  | } | ||||||
|  |  | ||||||
| func IsSliceSorted[T any](arr []T, less func(v1, v2 T) bool) bool { | func IsSliceSorted[T any](arr []T, less func(v1, v2 T) bool) bool { | ||||||
| 	return sort.SliceIsSorted(arr, func(i1, i2 int) bool { | 	return sort.SliceIsSorted(arr, func(i1, i2 int) bool { | ||||||
| 		return less(arr[i1], arr[i2]) | 		return less(arr[i1], arr[i2]) | ||||||
| @@ -44,12 +76,28 @@ func SortBy[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TEle | |||||||
| 	}) | 	}) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func AsSortedBy[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) []TElem { | ||||||
|  | 	arr = ArrCopy(arr) | ||||||
|  | 	sort.Slice(arr, func(i1, i2 int) bool { | ||||||
|  | 		return selector(arr[i1]) < selector(arr[i2]) | ||||||
|  | 	}) | ||||||
|  | 	return arr | ||||||
|  | } | ||||||
|  |  | ||||||
| func SortByStable[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) { | func SortByStable[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) { | ||||||
| 	sort.SliceStable(arr, func(i1, i2 int) bool { | 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||||
| 		return selector(arr[i1]) < selector(arr[i2]) | 		return selector(arr[i1]) < selector(arr[i2]) | ||||||
| 	}) | 	}) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func AsSortedByStable[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) []TElem { | ||||||
|  | 	arr = ArrCopy(arr) | ||||||
|  | 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||||
|  | 		return selector(arr[i1]) < selector(arr[i2]) | ||||||
|  | 	}) | ||||||
|  | 	return arr | ||||||
|  | } | ||||||
|  |  | ||||||
| func IsSortedBy[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) { | func IsSortedBy[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) { | ||||||
| 	sort.SliceStable(arr, func(i1, i2 int) bool { | 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||||
| 		return selector(arr[i1]) < selector(arr[i2]) | 		return selector(arr[i1]) < selector(arr[i2]) | ||||||
|   | |||||||
| @@ -9,6 +9,8 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/bson/bsonrw" | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
| 	"reflect" | 	"reflect" | ||||||
|  | 	"strconv" | ||||||
|  | 	"strings" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -65,36 +67,20 @@ func (t *Date) UnmarshalJSON(data []byte) error { | |||||||
| 	if err := json.Unmarshal(data, &str); err != nil { | 	if err := json.Unmarshal(data, &str); err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
| 	t0, err := time.Parse(t.FormatStr(), str) | 	return t.ParseString(str) | ||||||
| 	if err != nil { |  | ||||||
| 		return err |  | ||||||
| 	} |  | ||||||
| 	t.Year = t0.Year() |  | ||||||
| 	t.Month = int(t0.Month()) |  | ||||||
| 	t.Day = t0.Day() |  | ||||||
| 	return nil |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func (t Date) MarshalJSON() ([]byte, error) { | func (t Date) MarshalJSON() ([]byte, error) { | ||||||
| 	str := t.TimeUTC().Format(t.FormatStr()) | 	str := t.String() | ||||||
| 	return json.Marshal(str) | 	return json.Marshal(str) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (t Date) MarshalText() ([]byte, error) { | func (t Date) MarshalText() ([]byte, error) { | ||||||
| 	b := make([]byte, 0, len(t.FormatStr())) | 	return []byte(t.String()), nil | ||||||
| 	return t.TimeUTC().AppendFormat(b, t.FormatStr()), nil |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func (t *Date) UnmarshalText(data []byte) error { | func (t *Date) UnmarshalText(data []byte) error { | ||||||
| 	var err error | 	return t.ParseString(string(data)) | ||||||
| 	v, err := time.Parse(t.FormatStr(), string(data)) |  | ||||||
| 	if err != nil { |  | ||||||
| 		return err |  | ||||||
| 	} |  | ||||||
| 	t.Year = v.Year() |  | ||||||
| 	t.Month = int(v.Month()) |  | ||||||
| 	t.Day = v.Day() |  | ||||||
| 	return nil |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func (t *Date) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | func (t *Date) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
| @@ -164,7 +150,7 @@ func (t Date) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val | |||||||
| } | } | ||||||
|  |  | ||||||
| func (t Date) Serialize() string { | func (t Date) Serialize() string { | ||||||
| 	return t.TimeUTC().Format(t.FormatStr()) | 	return t.String() | ||||||
| } | } | ||||||
|  |  | ||||||
| func (t Date) FormatStr() string { | func (t Date) FormatStr() string { | ||||||
| @@ -212,11 +198,48 @@ func (t Date) Format(layout string) string { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (t Date) GoString() string { | func (t Date) GoString() string { | ||||||
| 	return t.TimeUTC().GoString() | 	return fmt.Sprintf("rfctime.Date{Year: %d, Month: %d, Day: %d}", t.Year, t.Month, t.Day) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (t Date) String() string { | func (t Date) String() string { | ||||||
| 	return t.TimeUTC().String() | 	return fmt.Sprintf("%04d-%02d-%02d", t.Year, t.Month, t.Day) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t *Date) ParseString(v string) error { | ||||||
|  | 	split := strings.Split(v, "-") | ||||||
|  | 	if len(split) != 3 { | ||||||
|  | 		return errors.New("invalid date format: " + v) | ||||||
|  | 	} | ||||||
|  | 	year, err := strconv.ParseInt(split[0], 10, 32) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errors.New("invalid date format: " + v + ": " + err.Error()) | ||||||
|  | 	} | ||||||
|  | 	month, err := strconv.ParseInt(split[1], 10, 32) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errors.New("invalid date format: " + v + ": " + err.Error()) | ||||||
|  | 	} | ||||||
|  | 	day, err := strconv.ParseInt(split[2], 10, 32) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errors.New("invalid date format: " + v + ": " + err.Error()) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if year < 0 { | ||||||
|  | 		return errors.New("invalid date format: " + v + ": year is negative") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if month < 1 || month > 12 { | ||||||
|  | 		return errors.New("invalid date format: " + v + ": month is out of range") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if day < 1 || day > 31 { | ||||||
|  | 		return errors.New("invalid date format: " + v + ": day is out of range") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	t.Year = int(year) | ||||||
|  | 	t.Month = int(month) | ||||||
|  | 	t.Day = int(day) | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func NewDate(t time.Time) Date { | func NewDate(t time.Time) Date { | ||||||
|   | |||||||
							
								
								
									
										148
									
								
								rfctime/time.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										148
									
								
								rfctime/time.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,148 @@ | |||||||
|  | package rfctime | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"strconv" | ||||||
|  | 	"strings" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
// Time represents a wall-clock time of day (hour / minute / second /
// nanosecond), independent of any date or timezone.
type Time struct {
	Hour       int
	Minute     int
	Second     int
	NanoSecond int
}

// Serialize returns the full fixed-width representation "HH:MM:SS.NNNNNNNNN".
//
// BUGFIX: the hour was previously formatted with "%04d" (a leftover from the
// 4-digit year format used by Date), producing e.g. "0013:30:00.…".
func (t Time) Serialize() string {
	return fmt.Sprintf("%02d:%02d:%02d.%09d", t.Hour, t.Minute, t.Second, t.NanoSecond)
}

// SerializeShort returns the shortest representation that still round-trips:
// "HH:MM" when second and nanosecond are zero, "HH:MM:SS" when only the
// nanosecond is zero, and the full form otherwise.
func (t Time) SerializeShort() string {
	if t.NanoSecond == 0 && t.Second == 0 {
		return fmt.Sprintf("%02d:%02d", t.Hour, t.Minute)
	} else if t.NanoSecond == 0 {
		return fmt.Sprintf("%02d:%02d:%02d", t.Hour, t.Minute, t.Second)
	} else {
		return fmt.Sprintf("%02d:%02d:%02d.%09d", t.Hour, t.Minute, t.Second, t.NanoSecond)
	}
}

// Deserialize parses "HH:MM", "HH:MM:SS" or "HH:MM:SS.NNN…" into t.
// Fractional seconds may have 1..9 digits and are right-padded to
// nanoseconds (".5" == 500ms). On error t is left unchanged.
func (t *Time) Deserialize(v string) error {

	var h, m, s, ns string

	split1 := strings.Split(v, ".")

	if len(split1) == 2 {

		// form "HH:MM:SS.frac" — a fraction requires all three clock fields
		split2 := strings.Split(split1[0], ":")
		if len(split2) == 3 {

			h = split2[0]
			m = split2[1]
			s = split2[2]
			ns = split1[1]

		} else {
			return fmt.Errorf("invalid time format: %s", v)
		}

	} else if len(split1) == 1 {

		// form "HH:MM" or "HH:MM:SS" — missing fields default to zero
		split2 := strings.Split(split1[0], ":")
		if len(split2) == 2 {

			h = split2[0]
			m = split2[1]
			s = "0"
			ns = "0"

		} else if len(split2) == 3 {

			h = split2[0]
			m = split2[1]
			s = split2[2]
			ns = "0"

		} else {
			return fmt.Errorf("invalid time format: %s", v)
		}

	} else {
		return fmt.Errorf("invalid time format: %s", v)
	}

	// right-pad fraction to 9 digits so "5" means 500_000_000 ns
	if len(ns) < 9 {
		ns += strings.Repeat("0", 9-len(ns))
	}

	hh, err := strconv.ParseInt(h, 10, 32)
	if err != nil {
		return fmt.Errorf("invalid time format: %s", v)
	}

	mm, err := strconv.ParseInt(m, 10, 32)
	if err != nil {
		return fmt.Errorf("invalid time format: %s", v)
	}

	ss, err := strconv.ParseInt(s, 10, 32)
	if err != nil {
		return fmt.Errorf("invalid time format: %s", v)
	}

	nss, err := strconv.ParseInt(ns, 10, 32)
	if err != nil {
		return fmt.Errorf("invalid time format: %s", v)
	}

	t.Hour = int(hh)
	t.Minute = int(mm)
	t.Second = int(ss)
	t.NanoSecond = int(nss)

	return nil
}

// FormatStr returns the time.Time layout string matching this type's
// serialized form.
func (t Time) FormatStr() string {
	return "15:04:05.999999999"
}

// GoString returns a Go-syntax construction expression for t.
func (t Time) GoString() string {
	return fmt.Sprintf("rfctime.NewTime(%d, %d, %d, %d)", t.Hour, t.Minute, t.Second, t.NanoSecond)
}

// String returns the full fixed-width representation "HH:MM:SS.NNNNNNNNN".
//
// BUGFIX: the hour was previously formatted with "%04d"; see Serialize.
func (t Time) String() string {
	return fmt.Sprintf("%02d:%02d:%02d.%09d", t.Hour, t.Minute, t.Second, t.NanoSecond)
}

// NewTime constructs a Time from its four components.
func NewTime(h int, m int, s int, ns int) Time {
	return Time{
		Hour:       h,
		Minute:     m,
		Second:     s,
		NanoSecond: ns,
	}
}

// NewTimeFromTS extracts the time-of-day components of a time.Time.
func NewTimeFromTS(t time.Time) Time {
	return Time{
		Hour:       t.Hour(),
		Minute:     t.Minute(),
		Second:     t.Second(),
		NanoSecond: t.Nanosecond(),
	}
}

// NowTime returns the current time-of-day in the given location.
func NowTime(loc *time.Location) Time {
	now := time.Now().In(loc)
	return NewTime(now.Hour(), now.Minute(), now.Second(), now.Nanosecond())
}

// NowTimeLoc returns the current time-of-day in the system-local timezone.
//
// BUGFIX: NowTimeLoc and NowTimeUTC had their locations swapped — Loc used
// time.UTC and UTC used time.Local.
func NowTimeLoc() Time {
	return NowTime(time.Local)
}

// NowTimeUTC returns the current time-of-day in UTC.
//
// BUGFIX: see NowTimeLoc — the locations were previously swapped.
func NowTimeUTC() Time {
	return NowTime(time.UTC)
}
| @@ -52,8 +52,7 @@ func TestCreateUpdateStatement(t *testing.T) { | |||||||
|  |  | ||||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||||
|  |  | ||||||
| 	db := NewDB(xdb) | 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||||
| 	db.RegisterDefaultConverter() |  | ||||||
|  |  | ||||||
| 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{}) | 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|   | |||||||
							
								
								
									
										32
									
								
								sq/commentTrimmer.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										32
									
								
								sq/commentTrimmer.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,32 @@ | |||||||
|  | package sq | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"strings" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | var CommentTrimmer = NewPreListener(fnTrimComments) | ||||||
|  |  | ||||||
|  | func fnTrimComments(ctx context.Context, cmdtype string, id *uint16, sql *string, params *PP) error { | ||||||
|  |  | ||||||
|  | 	res := make([]string, 0) | ||||||
|  |  | ||||||
|  | 	for _, s := range strings.Split(*sql, "\n") { | ||||||
|  | 		if strings.HasPrefix(strings.TrimSpace(s), "--") { | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		idx := strings.Index(s, "--") | ||||||
|  | 		if idx != -1 { | ||||||
|  | 			s = s[:idx] | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		s = strings.TrimRight(s, " \t\r\n") | ||||||
|  |  | ||||||
|  | 		res = append(res, s) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	*sql = strings.Join(res, "\n") | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
							
								
								
									
										142
									
								
								sq/converter.go
									
									
									
									
									
								
							
							
						
						
									
										142
									
								
								sq/converter.go
									
									
									
									
									
								
							| @@ -1,14 +1,10 @@ | |||||||
| package sq | package sq | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"encoding/json" |  | ||||||
| 	"errors" | 	"errors" | ||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" |  | ||||||
| 	"reflect" | 	"reflect" | ||||||
| 	"time" |  | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type DBTypeConverter interface { | type DBTypeConverter interface { | ||||||
| @@ -18,129 +14,16 @@ type DBTypeConverter interface { | |||||||
| 	DBToModel(v any) (any, error) | 	DBToModel(v any) (any, error) | ||||||
| } | } | ||||||
|  |  | ||||||
| var ConverterBoolToBit = NewDBTypeConverter[bool, int64](func(v bool) (int64, error) { | type DBDataConstraint interface { | ||||||
| 	return langext.Conditional(v, int64(1), int64(0)), nil | 	string | langext.NumberConstraint | []byte | ||||||
| }, func(v int64) (bool, error) { |  | ||||||
| 	if v == 0 { |  | ||||||
| 		return false, nil |  | ||||||
| 	} |  | ||||||
| 	if v == 1 { |  | ||||||
| 		return true, nil |  | ||||||
| 	} |  | ||||||
| 	return false, errors.New(fmt.Sprintf("invalid valud for boolean: '%d'", v)) |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| var ConverterTimeToUnixMillis = NewDBTypeConverter[time.Time, int64](func(v time.Time) (int64, error) { |  | ||||||
| 	return v.UnixMilli(), nil |  | ||||||
| }, func(v int64) (time.Time, error) { |  | ||||||
| 	return time.UnixMilli(v), nil |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| var ConverterRFCUnixMilliTimeToUnixMillis = NewDBTypeConverter[rfctime.UnixMilliTime, int64](func(v rfctime.UnixMilliTime) (int64, error) { |  | ||||||
| 	return v.UnixMilli(), nil |  | ||||||
| }, func(v int64) (rfctime.UnixMilliTime, error) { |  | ||||||
| 	return rfctime.NewUnixMilli(time.UnixMilli(v)), nil |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| var ConverterRFCUnixNanoTimeToUnixNanos = NewDBTypeConverter[rfctime.UnixNanoTime, int64](func(v rfctime.UnixNanoTime) (int64, error) { |  | ||||||
| 	return v.UnixNano(), nil |  | ||||||
| }, func(v int64) (rfctime.UnixNanoTime, error) { |  | ||||||
| 	return rfctime.NewUnixNano(time.Unix(0, v)), nil |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| var ConverterRFCUnixTimeToUnixSeconds = NewDBTypeConverter[rfctime.UnixTime, int64](func(v rfctime.UnixTime) (int64, error) { |  | ||||||
| 	return v.Unix(), nil |  | ||||||
| }, func(v int64) (rfctime.UnixTime, error) { |  | ||||||
| 	return rfctime.NewUnix(time.Unix(v, 0)), nil |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| // ConverterRFC339TimeToString |  | ||||||
| // Does not really use RFC339 - but sqlite does not understand timezones and the `T` delimiter |  | ||||||
| var ConverterRFC339TimeToString = NewDBTypeConverter[rfctime.RFC3339Time, string](func(v rfctime.RFC3339Time) (string, error) { |  | ||||||
| 	return v.Time().In(time.UTC).Format("2006-01-02 15:04:05"), nil |  | ||||||
| }, func(v string) (rfctime.RFC3339Time, error) { |  | ||||||
| 	t, err := time.Parse("2006-01-02 15:04:05", v) |  | ||||||
| 	if err != nil { |  | ||||||
| 		return rfctime.RFC3339Time{}, err |  | ||||||
| 	} |  | ||||||
| 	return rfctime.NewRFC3339(t), nil |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| // ConverterRFC339NanoTimeToString |  | ||||||
| // Does not really use RFC339 - but sqlite does not understand timezones and the `T` delimiter |  | ||||||
| var ConverterRFC339NanoTimeToString = NewDBTypeConverter[rfctime.RFC3339NanoTime, string](func(v rfctime.RFC3339NanoTime) (string, error) { |  | ||||||
| 	return v.Time().In(time.UTC).Format("2006-01-02 15:04:05.999999999"), nil |  | ||||||
| }, func(v string) (rfctime.RFC3339NanoTime, error) { |  | ||||||
| 	t, err := time.ParseInLocation("2006-01-02 15:04:05.999999999", v, time.UTC) |  | ||||||
| 	if err != nil { |  | ||||||
| 		return rfctime.RFC3339NanoTime{}, err |  | ||||||
| 	} |  | ||||||
| 	return rfctime.NewRFC3339Nano(t), nil |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| var ConverterJsonObjToString = NewDBTypeConverter[JsonObj, string](func(v JsonObj) (string, error) { |  | ||||||
| 	mrsh, err := json.Marshal(v) |  | ||||||
| 	if err != nil { |  | ||||||
| 		return "", err |  | ||||||
| 	} |  | ||||||
| 	return string(mrsh), nil |  | ||||||
| }, func(v string) (JsonObj, error) { |  | ||||||
| 	var mrsh JsonObj |  | ||||||
| 	if err := json.Unmarshal([]byte(v), &mrsh); err != nil { |  | ||||||
| 		return JsonObj{}, err |  | ||||||
| 	} |  | ||||||
| 	return mrsh, nil |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| var ConverterJsonArrToString = NewDBTypeConverter[JsonArr, string](func(v JsonArr) (string, error) { |  | ||||||
| 	mrsh, err := json.Marshal(v) |  | ||||||
| 	if err != nil { |  | ||||||
| 		return "", err |  | ||||||
| 	} |  | ||||||
| 	return string(mrsh), nil |  | ||||||
| }, func(v string) (JsonArr, error) { |  | ||||||
| 	var mrsh JsonArr |  | ||||||
| 	if err := json.Unmarshal([]byte(v), &mrsh); err != nil { |  | ||||||
| 		return JsonArr{}, err |  | ||||||
| 	} |  | ||||||
| 	return mrsh, nil |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| var ConverterExErrCategoryToString = NewDBTypeConverter[exerr.ErrorCategory, string](func(v exerr.ErrorCategory) (string, error) { |  | ||||||
| 	return v.Category, nil |  | ||||||
| }, func(v string) (exerr.ErrorCategory, error) { |  | ||||||
| 	for _, cat := range exerr.AllCategories { |  | ||||||
| 		if cat.Category == v { |  | ||||||
| 			return cat, nil |  | ||||||
| 		} |  | ||||||
| 	} |  | ||||||
| 	return exerr.CatUser, errors.New("failed to convert '" + v + "' to exerr.ErrorCategory") |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| var ConverterExErrSeverityToString = NewDBTypeConverter[exerr.ErrorSeverity, string](func(v exerr.ErrorSeverity) (string, error) { |  | ||||||
| 	return v.Severity, nil |  | ||||||
| }, func(v string) (exerr.ErrorSeverity, error) { |  | ||||||
| 	for _, sev := range exerr.AllSeverities { |  | ||||||
| 		if sev.Severity == v { |  | ||||||
| 			return sev, nil |  | ||||||
| 		} |  | ||||||
| 	} |  | ||||||
| 	return exerr.SevErr, errors.New("failed to convert '" + v + "' to exerr.ErrorSeverity") |  | ||||||
| }) |  | ||||||
|  |  | ||||||
| var ConverterExErrTypeToString = NewDBTypeConverter[exerr.ErrorType, string](func(v exerr.ErrorType) (string, error) { |  | ||||||
| 	return v.Key, nil |  | ||||||
| }, func(v string) (exerr.ErrorType, error) { |  | ||||||
| 	for _, etp := range exerr.ListRegisteredTypes() { |  | ||||||
| 		if etp.Key == v { |  | ||||||
| 			return etp, nil |  | ||||||
| 		} |  | ||||||
| } | } | ||||||
|  |  | ||||||
| 	return exerr.NewType(v, nil), nil | type DatabaseConvertible[TModelData any, TDBData DBDataConstraint] interface { | ||||||
| }) | 	MarshalToDB(v TModelData) (TDBData, error) | ||||||
|  | 	UnmarshalToModel(v TDBData) (TModelData, error) | ||||||
|  | } | ||||||
|  |  | ||||||
| type dbTypeConverterImpl[TModelData any, TDBData any] struct { | type dbTypeConverterImpl[TModelData any, TDBData DBDataConstraint] struct { | ||||||
| 	dbTypeString    string | 	dbTypeString    string | ||||||
| 	modelTypeString string | 	modelTypeString string | ||||||
| 	todb            func(v TModelData) (TDBData, error) | 	todb            func(v TModelData) (TDBData, error) | ||||||
| @@ -169,7 +52,7 @@ func (t *dbTypeConverterImpl[TModelData, TDBData]) DBToModel(v any) (any, error) | |||||||
| 	return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.dbTypeString, v)) | 	return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.dbTypeString, v)) | ||||||
| } | } | ||||||
|  |  | ||||||
| func NewDBTypeConverter[TModelData any, TDBData any](todb func(v TModelData) (TDBData, error), tomodel func(v TDBData) (TModelData, error)) DBTypeConverter { | func NewDBTypeConverter[TModelData any, TDBData DBDataConstraint](todb func(v TModelData) (TDBData, error), tomodel func(v TDBData) (TModelData, error)) DBTypeConverter { | ||||||
| 	return &dbTypeConverterImpl[TModelData, TDBData]{ | 	return &dbTypeConverterImpl[TModelData, TDBData]{ | ||||||
| 		dbTypeString:    fmt.Sprintf("%T", *new(TDBData)), | 		dbTypeString:    fmt.Sprintf("%T", *new(TDBData)), | ||||||
| 		modelTypeString: fmt.Sprintf("%T", *new(TModelData)), | 		modelTypeString: fmt.Sprintf("%T", *new(TModelData)), | ||||||
| @@ -178,6 +61,15 @@ func NewDBTypeConverter[TModelData any, TDBData any](todb func(v TModelData) (TD | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func NewAutoDBTypeConverter[TDBData DBDataConstraint, TModelData DatabaseConvertible[TModelData, TDBData]](obj TModelData) DBTypeConverter { | ||||||
|  | 	return &dbTypeConverterImpl[TModelData, TDBData]{ | ||||||
|  | 		dbTypeString:    fmt.Sprintf("%T", *new(TDBData)), | ||||||
|  | 		modelTypeString: fmt.Sprintf("%T", *new(TModelData)), | ||||||
|  | 		todb:            obj.MarshalToDB, | ||||||
|  | 		tomodel:         obj.UnmarshalToModel, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
| func convertValueToDB(q Queryable, value any) (any, error) { | func convertValueToDB(q Queryable, value any) (any, error) { | ||||||
| 	modelTypeStr := fmt.Sprintf("%T", value) | 	modelTypeStr := fmt.Sprintf("%T", value) | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										147
									
								
								sq/converterDefault.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										147
									
								
								sq/converterDefault.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,147 @@ | |||||||
|  | package sq | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/timeext" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | // ========================== COMMON DATATYPES ========================== | ||||||
|  |  | ||||||
|  | var ConverterBoolToBit = NewDBTypeConverter[bool, int64](func(v bool) (int64, error) { | ||||||
|  | 	return langext.Conditional(v, int64(1), int64(0)), nil | ||||||
|  | }, func(v int64) (bool, error) { | ||||||
|  | 	if v == 0 { | ||||||
|  | 		return false, nil | ||||||
|  | 	} | ||||||
|  | 	if v == 1 { | ||||||
|  | 		return true, nil | ||||||
|  | 	} | ||||||
|  | 	return false, errors.New(fmt.Sprintf("invalid valud for boolean: '%d'", v)) | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterTimeToUnixMillis = NewDBTypeConverter[time.Time, int64](func(v time.Time) (int64, error) { | ||||||
|  | 	return v.UnixMilli(), nil | ||||||
|  | }, func(v int64) (time.Time, error) { | ||||||
|  | 	return time.UnixMilli(v), nil | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | // ========================== RFCTIME ========================== | ||||||
|  |  | ||||||
|  | var ConverterRFCUnixMilliTimeToUnixMillis = NewDBTypeConverter[rfctime.UnixMilliTime, int64](func(v rfctime.UnixMilliTime) (int64, error) { | ||||||
|  | 	return v.UnixMilli(), nil | ||||||
|  | }, func(v int64) (rfctime.UnixMilliTime, error) { | ||||||
|  | 	return rfctime.NewUnixMilli(time.UnixMilli(v)), nil | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterRFCUnixNanoTimeToUnixNanos = NewDBTypeConverter[rfctime.UnixNanoTime, int64](func(v rfctime.UnixNanoTime) (int64, error) { | ||||||
|  | 	return v.UnixNano(), nil | ||||||
|  | }, func(v int64) (rfctime.UnixNanoTime, error) { | ||||||
|  | 	return rfctime.NewUnixNano(time.Unix(0, v)), nil | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterRFCUnixTimeToUnixSeconds = NewDBTypeConverter[rfctime.UnixTime, int64](func(v rfctime.UnixTime) (int64, error) { | ||||||
|  | 	return v.Unix(), nil | ||||||
|  | }, func(v int64) (rfctime.UnixTime, error) { | ||||||
|  | 	return rfctime.NewUnix(time.Unix(v, 0)), nil | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | // ConverterRFC339TimeToString | ||||||
|  | // Does not really use RFC339 - but sqlite does not understand timezones and the `T` delimiter | ||||||
|  | var ConverterRFC339TimeToString = NewDBTypeConverter[rfctime.RFC3339Time, string](func(v rfctime.RFC3339Time) (string, error) { | ||||||
|  | 	return v.Time().In(time.UTC).Format("2006-01-02 15:04:05"), nil | ||||||
|  | }, func(v string) (rfctime.RFC3339Time, error) { | ||||||
|  | 	t, err := time.Parse("2006-01-02 15:04:05", v) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return rfctime.RFC3339Time{}, err | ||||||
|  | 	} | ||||||
|  | 	return rfctime.NewRFC3339(t), nil | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | // ConverterRFC339NanoTimeToString | ||||||
|  | // Does not really use RFC339 - but sqlite does not understand timezones and the `T` delimiter | ||||||
|  | var ConverterRFC339NanoTimeToString = NewDBTypeConverter[rfctime.RFC3339NanoTime, string](func(v rfctime.RFC3339NanoTime) (string, error) { | ||||||
|  | 	return v.Time().In(time.UTC).Format("2006-01-02 15:04:05.999999999"), nil | ||||||
|  | }, func(v string) (rfctime.RFC3339NanoTime, error) { | ||||||
|  | 	t, err := time.ParseInLocation("2006-01-02 15:04:05.999999999", v, time.UTC) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return rfctime.RFC3339NanoTime{}, err | ||||||
|  | 	} | ||||||
|  | 	return rfctime.NewRFC3339Nano(t), nil | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterRFCDateToString = NewDBTypeConverter[rfctime.Date, string](func(v rfctime.Date) (string, error) { | ||||||
|  | 	return fmt.Sprintf("%04d-%02d-%02d", v.Year, v.Month, v.Day), nil | ||||||
|  | }, func(v string) (rfctime.Date, error) { | ||||||
|  | 	d := rfctime.Date{} | ||||||
|  | 	if err := d.ParseString(v); err != nil { | ||||||
|  | 		return rfctime.Date{}, err | ||||||
|  | 	} else { | ||||||
|  | 		return d, nil | ||||||
|  | 	} | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterRFCTimeToString = NewDBTypeConverter[rfctime.Time, string](func(v rfctime.Time) (string, error) { | ||||||
|  | 	return v.SerializeShort(), nil | ||||||
|  | }, func(v string) (rfctime.Time, error) { | ||||||
|  | 	res := rfctime.Time{} | ||||||
|  | 	err := res.Deserialize(v) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return rfctime.Time{}, err | ||||||
|  | 	} | ||||||
|  | 	return res, nil | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterRFCSecondsF64ToString = NewDBTypeConverter[rfctime.SecondsF64, float64](func(v rfctime.SecondsF64) (float64, error) { | ||||||
|  | 	return v.Seconds(), nil | ||||||
|  | }, func(v float64) (rfctime.SecondsF64, error) { | ||||||
|  | 	return rfctime.NewSecondsF64(timeext.FromSeconds(v)), nil | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | // ========================== JSON ========================== | ||||||
|  |  | ||||||
|  | var ConverterJsonObjToString = NewAutoDBTypeConverter(JsonObj{}) | ||||||
|  |  | ||||||
|  | var ConverterJsonArrToString = NewAutoDBTypeConverter(JsonArr{}) | ||||||
|  |  | ||||||
|  | // Json[T] must be registered manually for each gen-type | ||||||
|  |  | ||||||
|  | // ========================== EXERR ========================== | ||||||
|  |  | ||||||
|  | var ConverterExErrCategoryToString = NewDBTypeConverter[exerr.ErrorCategory, string](func(v exerr.ErrorCategory) (string, error) { | ||||||
|  | 	return v.Category, nil | ||||||
|  | }, func(v string) (exerr.ErrorCategory, error) { | ||||||
|  | 	for _, cat := range exerr.AllCategories { | ||||||
|  | 		if cat.Category == v { | ||||||
|  | 			return cat, nil | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return exerr.CatUser, errors.New("failed to convert '" + v + "' to exerr.ErrorCategory") | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterExErrSeverityToString = NewDBTypeConverter[exerr.ErrorSeverity, string](func(v exerr.ErrorSeverity) (string, error) { | ||||||
|  | 	return v.Severity, nil | ||||||
|  | }, func(v string) (exerr.ErrorSeverity, error) { | ||||||
|  | 	for _, sev := range exerr.AllSeverities { | ||||||
|  | 		if sev.Severity == v { | ||||||
|  | 			return sev, nil | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return exerr.SevErr, errors.New("failed to convert '" + v + "' to exerr.ErrorSeverity") | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterExErrTypeToString = NewDBTypeConverter[exerr.ErrorType, string](func(v exerr.ErrorType) (string, error) { | ||||||
|  | 	return v.Key, nil | ||||||
|  | }, func(v string) (exerr.ErrorType, error) { | ||||||
|  | 	for _, etp := range exerr.ListRegisteredTypes() { | ||||||
|  | 		if etp.Key == v { | ||||||
|  | 			return etp, nil | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return exerr.NewType(v, nil), nil | ||||||
|  | }) | ||||||
| @@ -17,7 +17,11 @@ type DB interface { | |||||||
| 	AddListener(listener Listener) | 	AddListener(listener Listener) | ||||||
| 	Exit() error | 	Exit() error | ||||||
| 	RegisterConverter(DBTypeConverter) | 	RegisterConverter(DBTypeConverter) | ||||||
| 	RegisterDefaultConverter() | } | ||||||
|  |  | ||||||
// DBOptions configures the optional behavior NewDB applies when wrapping a connection.
// Pointer fields distinguish "unset" (nil => default true) from an explicit false.
type DBOptions struct {
	RegisterDefaultConverter *bool // nil or true: register the built-in DBTypeConverters
	RegisterCommentTrimmer   *bool // nil or true: attach the CommentTrimmer pre-listener
}
|  |  | ||||||
| type database struct { | type database struct { | ||||||
| @@ -28,13 +32,23 @@ type database struct { | |||||||
| 	conv  []DBTypeConverter | 	conv  []DBTypeConverter | ||||||
| } | } | ||||||
|  |  | ||||||
| func NewDB(db *sqlx.DB) DB { | func NewDB(db *sqlx.DB, opt DBOptions) DB { | ||||||
| 	return &database{ | 	sqdb := &database{ | ||||||
| 		db:    db, | 		db:    db, | ||||||
| 		txctr: 0, | 		txctr: 0, | ||||||
| 		lock:  sync.Mutex{}, | 		lock:  sync.Mutex{}, | ||||||
| 		lstr:  make([]Listener, 0), | 		lstr:  make([]Listener, 0), | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	if langext.Coalesce(opt.RegisterDefaultConverter, true) { | ||||||
|  | 		sqdb.registerDefaultConverter() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if langext.Coalesce(opt.RegisterCommentTrimmer, true) { | ||||||
|  | 		sqdb.AddListener(CommentTrimmer) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return sqdb | ||||||
| } | } | ||||||
|  |  | ||||||
| func (db *database) AddListener(listener Listener) { | func (db *database) AddListener(listener Listener) { | ||||||
| @@ -141,16 +155,23 @@ func (db *database) RegisterConverter(conv DBTypeConverter) { | |||||||
| 	db.conv = append(db.conv, conv) | 	db.conv = append(db.conv, conv) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (db *database) RegisterDefaultConverter() { | func (db *database) registerDefaultConverter() { | ||||||
| 	db.RegisterConverter(ConverterBoolToBit) | 	db.RegisterConverter(ConverterBoolToBit) | ||||||
|  |  | ||||||
| 	db.RegisterConverter(ConverterTimeToUnixMillis) | 	db.RegisterConverter(ConverterTimeToUnixMillis) | ||||||
|  |  | ||||||
| 	db.RegisterConverter(ConverterRFCUnixMilliTimeToUnixMillis) | 	db.RegisterConverter(ConverterRFCUnixMilliTimeToUnixMillis) | ||||||
| 	db.RegisterConverter(ConverterRFCUnixNanoTimeToUnixNanos) | 	db.RegisterConverter(ConverterRFCUnixNanoTimeToUnixNanos) | ||||||
| 	db.RegisterConverter(ConverterRFCUnixTimeToUnixSeconds) | 	db.RegisterConverter(ConverterRFCUnixTimeToUnixSeconds) | ||||||
| 	db.RegisterConverter(ConverterRFC339TimeToString) | 	db.RegisterConverter(ConverterRFC339TimeToString) | ||||||
| 	db.RegisterConverter(ConverterRFC339NanoTimeToString) | 	db.RegisterConverter(ConverterRFC339NanoTimeToString) | ||||||
|  | 	db.RegisterConverter(ConverterRFCDateToString) | ||||||
|  | 	db.RegisterConverter(ConverterRFCTimeToString) | ||||||
|  | 	db.RegisterConverter(ConverterRFCSecondsF64ToString) | ||||||
|  |  | ||||||
| 	db.RegisterConverter(ConverterJsonObjToString) | 	db.RegisterConverter(ConverterJsonObjToString) | ||||||
| 	db.RegisterConverter(ConverterJsonArrToString) | 	db.RegisterConverter(ConverterJsonArrToString) | ||||||
|  |  | ||||||
| 	db.RegisterConverter(ConverterExErrCategoryToString) | 	db.RegisterConverter(ConverterExErrCategoryToString) | ||||||
| 	db.RegisterConverter(ConverterExErrSeverityToString) | 	db.RegisterConverter(ConverterExErrSeverityToString) | ||||||
| 	db.RegisterConverter(ConverterExErrTypeToString) | 	db.RegisterConverter(ConverterExErrTypeToString) | ||||||
|   | |||||||
							
								
								
									
										56
									
								
								sq/filter.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										56
									
								
								sq/filter.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,56 @@ | |||||||
|  | package sq | ||||||
|  |  | ||||||
|  | import ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | ||||||
|  |  | ||||||
|  | type FilterSort struct { | ||||||
|  | 	Field     string | ||||||
|  | 	Direction ct.SortDirection | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type PaginateFilter interface { | ||||||
|  | 	SQL(params PP) (filterClause string, joinClause string, joinTables []string) | ||||||
|  | 	Sort() []FilterSort | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type genericPaginateFilter struct { | ||||||
|  | 	sql  func(params PP) (filterClause string, joinClause string, joinTables []string) | ||||||
|  | 	sort func() []FilterSort | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (g genericPaginateFilter) SQL(params PP) (filterClause string, joinClause string, joinTables []string) { | ||||||
|  | 	return g.sql(params) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (g genericPaginateFilter) Sort() []FilterSort { | ||||||
|  | 	return g.sort() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPaginateFilter(sql func(params PP) (filterClause string, joinClause string, joinTables []string), sort []FilterSort) PaginateFilter { | ||||||
|  | 	return genericPaginateFilter{ | ||||||
|  | 		sql: func(params PP) (filterClause string, joinClause string, joinTables []string) { | ||||||
|  | 			return sql(params) | ||||||
|  | 		}, | ||||||
|  | 		sort: func() []FilterSort { | ||||||
|  | 			return sort | ||||||
|  | 		}, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewSimplePaginateFilter(filterClause string, filterParams PP, sort []FilterSort) PaginateFilter { | ||||||
|  | 	return genericPaginateFilter{ | ||||||
|  | 		sql: func(params PP) (string, string, []string) { | ||||||
|  | 			params.AddAll(filterParams) | ||||||
|  | 			return filterClause, "", nil | ||||||
|  | 		}, | ||||||
|  | 		sort: func() []FilterSort { | ||||||
|  | 			return sort | ||||||
|  | 		}, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewEmptyPaginateFilter() PaginateFilter { | ||||||
|  | 	return genericPaginateFilter{ | ||||||
|  | 		sql:  func(params PP) (string, string, []string) { return "1=1", "", nil }, | ||||||
|  | 		sort: func() []FilterSort { return make([]FilterSort, 0) }, | ||||||
|  | 	} | ||||||
|  | } | ||||||
| @@ -31,7 +31,7 @@ func HashMattnSqliteSchema(ctx context.Context, schemaStr string) (string, error | |||||||
| 		return "", err | 		return "", err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	db := NewDB(xdb) | 	db := NewDB(xdb, DBOptions{}) | ||||||
|  |  | ||||||
| 	_, err = db.Exec(ctx, schemaStr, PP{}) | 	_, err = db.Exec(ctx, schemaStr, PP{}) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -59,7 +59,7 @@ func HashGoSqliteSchema(ctx context.Context, schemaStr string) (string, error) { | |||||||
| 		return "", err | 		return "", err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	db := NewDB(xdb) | 	db := NewDB(xdb, DBOptions{}) | ||||||
|  |  | ||||||
| 	_, err = db.Exec(ctx, schemaStr, PP{}) | 	_, err = db.Exec(ctx, schemaStr, PP{}) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
|   | |||||||
							
								
								
									
										54
									
								
								sq/json.go
									
									
									
									
									
								
							
							
						
						
									
										54
									
								
								sq/json.go
									
									
									
									
									
								
							| @@ -1,5 +1,59 @@ | |||||||
| package sq | package sq | ||||||
|  |  | ||||||
|  | import "encoding/json" | ||||||
|  |  | ||||||
// JsonObj is a generic JSON object that is stored in the database as its serialized string form.
type JsonObj map[string]any

// MarshalToDB serializes v into its JSON string representation.
func (j JsonObj) MarshalToDB(v JsonObj) (string, error) {
	raw, err := json.Marshal(v)
	if err != nil {
		return "", err
	}
	return string(raw), nil
}

// UnmarshalToModel parses a JSON string back into a JsonObj.
func (j JsonObj) UnmarshalToModel(v string) (JsonObj, error) {
	var obj JsonObj
	if err := json.Unmarshal([]byte(v), &obj); err != nil {
		return JsonObj{}, err
	}
	return obj, nil
}
|  |  | ||||||
// JsonArr is a generic JSON array that is stored in the database as its serialized string form.
type JsonArr []any

// MarshalToDB serializes v into its JSON string representation.
func (j JsonArr) MarshalToDB(v JsonArr) (string, error) {
	raw, err := json.Marshal(v)
	if err != nil {
		return "", err
	}
	return string(raw), nil
}

// UnmarshalToModel parses a JSON string back into a JsonArr.
func (j JsonArr) UnmarshalToModel(v string) (JsonArr, error) {
	var arr JsonArr
	if err := json.Unmarshal([]byte(v), &arr); err != nil {
		return JsonArr{}, err
	}
	return arr, nil
}
|  |  | ||||||
// AutoJson wraps an arbitrary value of type T and persists it as a JSON string column.
type AutoJson[T any] struct {
	Value T
}

// MarshalToDB serializes the wrapped value into a JSON string.
func (j AutoJson[T]) MarshalToDB(v AutoJson[T]) (string, error) {
	raw, err := json.Marshal(v.Value)
	if err != nil {
		return "", err
	}
	return string(raw), nil
}

// UnmarshalToModel parses a JSON string into a fresh AutoJson wrapper.
func (j AutoJson[T]) UnmarshalToModel(v string) (AutoJson[T], error) {
	var val T
	if err := json.Unmarshal([]byte(v), &val); err != nil {
		return AutoJson[T]{}, err
	}
	return AutoJson[T]{Value: val}, nil
}
|   | |||||||
							
								
								
									
										48
									
								
								sq/list.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										48
									
								
								sq/list.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,48 @@ | |||||||
|  | package sq | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func Iterate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int, consumer func(v TData) error) (int, error) { | ||||||
|  | 	if filter == nil { | ||||||
|  | 		filter = NewEmptyPaginateFilter() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	prepParams := PP{} | ||||||
|  |  | ||||||
|  | 	sortOrder := filter.Sort() | ||||||
|  | 	sortCond := "" | ||||||
|  | 	if len(sortOrder) > 0 { | ||||||
|  | 		sortCond = "ORDER BY " | ||||||
|  | 		for i, v := range sortOrder { | ||||||
|  | 			if i > 0 { | ||||||
|  | 				sortCond += ", " | ||||||
|  | 			} | ||||||
|  | 			sortCond += v.Field + " " + string(v.Direction) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pageCond := "" | ||||||
|  | 	if limit != nil { | ||||||
|  | 		pageCond += fmt.Sprintf("LIMIT :%s OFFSET :%s", prepParams.Add(*limit+1), prepParams.Add(*limit*(page-1))) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	filterCond, joinCond, joinTables := filter.SQL(prepParams) | ||||||
|  |  | ||||||
|  | 	selectCond := table + ".*" | ||||||
|  | 	for _, v := range joinTables { | ||||||
|  | 		selectCond += ", " + v + ".*" | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	sqlQueryData := "SELECT " + selectCond + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " ) " + sortCond + " " + pageCond | ||||||
|  |  | ||||||
|  | 	rows, err := q.Query(ctx, sqlQueryData, prepParams) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return 0, exerr.Wrap(err, "failed to list paginated entries from DB").Str("table", table).Any("filter", filter).Int("page", page).Any("limit", limit).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return IterateAll[TData](ctx, q, rows, scanMode, scanSec, true, consumer) | ||||||
|  | } | ||||||
							
								
								
									
										169
									
								
								sq/listener.go
									
									
									
									
									
								
							
							
						
						
									
										169
									
								
								sq/listener.go
									
									
									
									
									
								
							| @@ -17,3 +17,172 @@ type Listener interface { | |||||||
| 	PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP) | 	PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP) | ||||||
| 	PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP) | 	PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type genListener struct { | ||||||
|  | 	prePing        func(ctx context.Context) error | ||||||
|  | 	preTxBegin     func(ctx context.Context, txid uint16) error | ||||||
|  | 	preTxCommit    func(txid uint16) error | ||||||
|  | 	preTxRollback  func(txid uint16) error | ||||||
|  | 	preQuery       func(ctx context.Context, txID *uint16, sql *string, params *PP) error | ||||||
|  | 	preExec        func(ctx context.Context, txID *uint16, sql *string, params *PP) error | ||||||
|  | 	postPing       func(result error) | ||||||
|  | 	postTxBegin    func(txid uint16, result error) | ||||||
|  | 	postTxCommit   func(txid uint16, result error) | ||||||
|  | 	postTxRollback func(txid uint16, result error) | ||||||
|  | 	postQuery      func(txID *uint16, sqlOriginal string, sqlReal string, params PP) | ||||||
|  | 	postExec       func(txID *uint16, sqlOriginal string, sqlReal string, params PP) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (g genListener) PrePing(ctx context.Context) error { | ||||||
|  | 	if g.prePing != nil { | ||||||
|  | 		return g.prePing(ctx) | ||||||
|  | 	} else { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (g genListener) PreTxBegin(ctx context.Context, txid uint16) error { | ||||||
|  | 	if g.preTxBegin != nil { | ||||||
|  | 		return g.preTxBegin(ctx, txid) | ||||||
|  | 	} else { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (g genListener) PreTxCommit(txid uint16) error { | ||||||
|  | 	if g.preTxCommit != nil { | ||||||
|  | 		return g.preTxCommit(txid) | ||||||
|  | 	} else { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (g genListener) PreTxRollback(txid uint16) error { | ||||||
|  | 	if g.preTxRollback != nil { | ||||||
|  | 		return g.preTxRollback(txid) | ||||||
|  | 	} else { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (g genListener) PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP) error { | ||||||
|  | 	if g.preQuery != nil { | ||||||
|  | 		return g.preQuery(ctx, txID, sql, params) | ||||||
|  | 	} else { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (g genListener) PreExec(ctx context.Context, txID *uint16, sql *string, params *PP) error { | ||||||
|  | 	if g.preExec != nil { | ||||||
|  | 		return g.preExec(ctx, txID, sql, params) | ||||||
|  | 	} else { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (g genListener) PostPing(result error) { | ||||||
|  | 	if g.postPing != nil { | ||||||
|  | 		g.postPing(result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (g genListener) PostTxBegin(txid uint16, result error) { | ||||||
|  | 	if g.postTxBegin != nil { | ||||||
|  | 		g.postTxBegin(txid, result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (g genListener) PostTxCommit(txid uint16, result error) { | ||||||
|  | 	if g.postTxCommit != nil { | ||||||
|  | 		g.postTxCommit(txid, result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (g genListener) PostTxRollback(txid uint16, result error) { | ||||||
|  | 	if g.postTxRollback != nil { | ||||||
|  | 		g.postTxRollback(txid, result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (g genListener) PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP) { | ||||||
|  | 	if g.postQuery != nil { | ||||||
|  | 		g.postQuery(txID, sqlOriginal, sqlReal, params) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (g genListener) PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP) { | ||||||
|  | 	if g.postExec != nil { | ||||||
|  | 		g.postExec(txID, sqlOriginal, sqlReal, params) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPrePingListener(f func(ctx context.Context) error) Listener { | ||||||
|  | 	return genListener{prePing: f} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPreTxBeginListener(f func(ctx context.Context, txid uint16) error) Listener { | ||||||
|  | 	return genListener{preTxBegin: f} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPreTxCommitListener(f func(txid uint16) error) Listener { | ||||||
|  | 	return genListener{preTxCommit: f} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPreTxRollbackListener(f func(txid uint16) error) Listener { | ||||||
|  | 	return genListener{preTxRollback: f} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPreQueryListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP) error) Listener { | ||||||
|  | 	return genListener{preQuery: f} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPreExecListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP) error) Listener { | ||||||
|  | 	return genListener{preExec: f} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPreListener(f func(ctx context.Context, cmdtype string, txID *uint16, sql *string, params *PP) error) Listener { | ||||||
|  | 	return genListener{ | ||||||
|  | 		preExec: func(ctx context.Context, txID *uint16, sql *string, params *PP) error { | ||||||
|  | 			return f(ctx, "EXEC", txID, sql, params) | ||||||
|  | 		}, | ||||||
|  | 		preQuery: func(ctx context.Context, txID *uint16, sql *string, params *PP) error { | ||||||
|  | 			return f(ctx, "QUERY", txID, sql, params) | ||||||
|  | 		}, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPostPingListener(f func(result error)) Listener { | ||||||
|  | 	return genListener{postPing: f} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPostTxBeginListener(f func(txid uint16, result error)) Listener { | ||||||
|  | 	return genListener{postTxBegin: f} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPostTxCommitListener(f func(txid uint16, result error)) Listener { | ||||||
|  | 	return genListener{postTxCommit: f} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPostTxRollbackListener(f func(txid uint16, result error)) Listener { | ||||||
|  | 	return genListener{postTxRollback: f} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPostQueryListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener { | ||||||
|  | 	return genListener{postQuery: f} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPostExecListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener { | ||||||
|  | 	return genListener{postExec: f} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPostListener(f func(cmdtype string, txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener { | ||||||
|  | 	return genListener{ | ||||||
|  | 		postExec: func(txID *uint16, sqlOriginal string, sqlReal string, params PP) { | ||||||
|  | 			f("EXEC", txID, sqlOriginal, sqlReal, params) | ||||||
|  | 		}, | ||||||
|  | 		postQuery: func(txID *uint16, sqlOriginal string, sqlReal string, params PP) { | ||||||
|  | 			f("QUERY", txID, sqlOriginal, sqlReal, params) | ||||||
|  | 		}, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|   | |||||||
| @@ -3,23 +3,16 @@ package sq | |||||||
| import ( | import ( | ||||||
| 	"context" | 	"context" | ||||||
| 	"fmt" | 	"fmt" | ||||||
| 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type PaginateFilter interface { |  | ||||||
| 	SQL(params PP) (filterClause string, joinClause string, joinTables []string) |  | ||||||
| 	Sort() []FilterSort |  | ||||||
| } |  | ||||||
|  |  | ||||||
| type FilterSort struct { |  | ||||||
| 	Field     string |  | ||||||
| 	Direction ct.SortDirection |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func Paginate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int) ([]TData, pag.Pagination, error) { | func Paginate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||||
|  | 	if filter == nil { | ||||||
|  | 		filter = NewEmptyPaginateFilter() | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	prepParams := PP{} | 	prepParams := PP{} | ||||||
|  |  | ||||||
| 	sortOrder := filter.Sort() | 	sortOrder := filter.Sort() | ||||||
| @@ -101,6 +94,10 @@ func Paginate[TData any](ctx context.Context, q Queryable, table string, filter | |||||||
| } | } | ||||||
|  |  | ||||||
| func Count(ctx context.Context, q Queryable, table string, filter PaginateFilter) (int, error) { | func Count(ctx context.Context, q Queryable, table string, filter PaginateFilter) (int, error) { | ||||||
|  | 	if filter == nil { | ||||||
|  | 		filter = NewEmptyPaginateFilter() | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	prepParams := PP{} | 	prepParams := PP{} | ||||||
|  |  | ||||||
| 	filterCond, joinCond, _ := filter.SQL(prepParams) | 	filterCond, joinCond, _ := filter.SQL(prepParams) | ||||||
|   | |||||||
| @@ -20,6 +20,12 @@ func (pp *PP) Add(v any) string { | |||||||
| 	return id | 	return id | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (pp *PP) AddAll(other PP) { | ||||||
|  | 	for id, v := range other { | ||||||
|  | 		(*pp)[id] = v | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
| func PPID() string { | func PPID() string { | ||||||
| 	return "p_" + langext.RandBase62(8) | 	return "p_" + langext.RandBase62(8) | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										148
									
								
								sq/scanner.go
									
									
									
									
									
								
							
							
						
						
									
										148
									
								
								sq/scanner.go
									
									
									
									
									
								
							| @@ -4,9 +4,11 @@ import ( | |||||||
| 	"context" | 	"context" | ||||||
| 	"database/sql" | 	"database/sql" | ||||||
| 	"errors" | 	"errors" | ||||||
|  | 	"fmt" | ||||||
| 	"github.com/jmoiron/sqlx" | 	"github.com/jmoiron/sqlx" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"reflect" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type StructScanMode string | type StructScanMode string | ||||||
| @@ -38,6 +40,49 @@ func InsertSingle[TData any](ctx context.Context, q Queryable, tableName string, | |||||||
| 	return sqlr, nil | 	return sqlr, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func InsertAndQuerySingle[TData any](ctx context.Context, q Queryable, tableName string, v TData, idColumn string, mode StructScanMode, sec StructScanSafety) (TData, error) { | ||||||
|  |  | ||||||
|  | 	rval := reflect.ValueOf(v) | ||||||
|  |  | ||||||
|  | 	idRVal := fieldByTag(rval, "db", idColumn) | ||||||
|  | 	if !idRVal.IsValid() || idRVal.IsZero() { | ||||||
|  | 		return *new(TData), fmt.Errorf("failed to find idColumn '%s' in %T", idColumn, v) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	idValue, err := convertValueToDB(q, idRVal.Interface()) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	_, err = InsertSingle[TData](ctx, q, tableName, v) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pp := PP{} | ||||||
|  |  | ||||||
|  | 	//goland:noinspection ALL | ||||||
|  | 	sqlstr := fmt.Sprintf("SELECT * FROM %s WHERE %s = :%s", tableName, idColumn, pp.Add(idValue)) | ||||||
|  |  | ||||||
|  | 	return QuerySingle[TData](ctx, q, sqlstr, pp, mode, sec) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func fieldByTag(rval reflect.Value, tagkey string, tagval string) reflect.Value { | ||||||
|  | 	rtyp := rval.Type() | ||||||
|  | 	for i := 0; i < rtyp.NumField(); i++ { | ||||||
|  | 		rsfield := rtyp.Field(i) | ||||||
|  |  | ||||||
|  | 		if !rsfield.IsExported() { | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if rsfield.Tag.Get(tagkey) == tagval { | ||||||
|  | 			return rval.Field(i) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	panic(fmt.Sprintf("tag %s = '%s' not found in %s", tagkey, tagval, rtyp.Name())) | ||||||
|  | } | ||||||
|  |  | ||||||
| func InsertMultiple[TData any](ctx context.Context, q Queryable, tableName string, vArr []TData, maxBatch int) ([]sql.Result, error) { | func InsertMultiple[TData any](ctx context.Context, q Queryable, tableName string, vArr []TData, maxBatch int) ([]sql.Result, error) { | ||||||
|  |  | ||||||
| 	if len(vArr) == 0 { | 	if len(vArr) == 0 { | ||||||
| @@ -89,6 +134,33 @@ func UpdateSingle[TData any](ctx context.Context, q Queryable, tableName string, | |||||||
| 	return sqlr, nil | 	return sqlr, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func UpdateAndQuerySingle[TData any](ctx context.Context, q Queryable, tableName string, v TData, idColumn string, mode StructScanMode, sec StructScanSafety) (TData, error) { | ||||||
|  |  | ||||||
|  | 	rval := reflect.ValueOf(v) | ||||||
|  |  | ||||||
|  | 	idRVal := fieldByTag(rval, "db", idColumn) | ||||||
|  | 	if !idRVal.IsValid() || idRVal.IsZero() { | ||||||
|  | 		return *new(TData), fmt.Errorf("failed to find idColumn '%s' in %T", idColumn, v) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	idValue, err := convertValueToDB(q, idRVal.Interface()) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	_, err = UpdateSingle[TData](ctx, q, tableName, v, idColumn) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pp := PP{} | ||||||
|  |  | ||||||
|  | 	//goland:noinspection ALL | ||||||
|  | 	sqlstr := fmt.Sprintf("SELECT * FROM %s WHERE %s = :%s", tableName, idColumn, pp.Add(idValue)) | ||||||
|  |  | ||||||
|  | 	return QuerySingle[TData](ctx, q, sqlstr, pp, mode, sec) | ||||||
|  | } | ||||||
|  |  | ||||||
| func QuerySingle[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) (TData, error) { | func QuerySingle[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) (TData, error) { | ||||||
| 	rows, err := q.Query(ctx, sql, pp) | 	rows, err := q.Query(ctx, sql, pp) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -261,3 +333,79 @@ func ScanAll[TData any](ctx context.Context, q Queryable, rows *sqlx.Rows, mode | |||||||
| 	} | 	} | ||||||
| 	return res, nil | 	return res, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func IterateAll[TData any](ctx context.Context, q Queryable, rows *sqlx.Rows, mode StructScanMode, sec StructScanSafety, close bool, consumer func(v TData) error) (int, error) { | ||||||
|  | 	var strscan *StructScanner | ||||||
|  |  | ||||||
|  | 	if sec == Safe { | ||||||
|  | 		strscan = NewStructScanner(rows, false) | ||||||
|  | 		var data TData | ||||||
|  | 		err := strscan.Start(&data) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return 0, err | ||||||
|  | 		} | ||||||
|  | 	} else if sec == Unsafe { | ||||||
|  | 		strscan = NewStructScanner(rows, true) | ||||||
|  | 		var data TData | ||||||
|  | 		err := strscan.Start(&data) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return 0, err | ||||||
|  | 		} | ||||||
|  | 	} else { | ||||||
|  | 		return 0, errors.New("unknown value for <sec>") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	rcount := 0 | ||||||
|  |  | ||||||
|  | 	for rows.Next() { | ||||||
|  |  | ||||||
|  | 		if err := ctx.Err(); err != nil { | ||||||
|  | 			return rcount, err | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if mode == SModeFast { | ||||||
|  | 			var data TData | ||||||
|  | 			err := strscan.StructScanBase(&data) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return rcount, err | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			err = consumer(data) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return rcount, exerr.Wrap(err, "").Build() | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			rcount++ | ||||||
|  |  | ||||||
|  | 		} else if mode == SModeExtended { | ||||||
|  | 			var data TData | ||||||
|  | 			err := strscan.StructScanExt(q, &data) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return rcount, err | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			err = consumer(data) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return rcount, exerr.Wrap(err, "").Build() | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			rcount++ | ||||||
|  |  | ||||||
|  | 		} else { | ||||||
|  | 			return rcount, errors.New("unknown value for <mode>") | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if close { | ||||||
|  | 		err := strscan.rows.Close() | ||||||
|  | 		if err != nil { | ||||||
|  | 			return rcount, err | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if err := rows.Err(); err != nil { | ||||||
|  | 		return rcount, err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return rcount, nil | ||||||
|  | } | ||||||
|   | |||||||
| @@ -36,8 +36,7 @@ func TestInsertSingle(t *testing.T) { | |||||||
|  |  | ||||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||||
|  |  | ||||||
| 	db := NewDB(xdb) | 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||||
| 	db.RegisterDefaultConverter() |  | ||||||
|  |  | ||||||
| 	_, err := db.Exec(ctx, ` | 	_, err := db.Exec(ctx, ` | ||||||
| 		CREATE TABLE requests (  | 		CREATE TABLE requests (  | ||||||
| @@ -90,8 +89,7 @@ func TestUpdateSingle(t *testing.T) { | |||||||
|  |  | ||||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||||
|  |  | ||||||
| 	db := NewDB(xdb) | 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||||
| 	db.RegisterDefaultConverter() |  | ||||||
|  |  | ||||||
| 	_, err := db.Exec(ctx, ` | 	_, err := db.Exec(ctx, ` | ||||||
| 		CREATE TABLE requests (  | 		CREATE TABLE requests (  | ||||||
| @@ -176,8 +174,7 @@ func TestInsertMultiple(t *testing.T) { | |||||||
|  |  | ||||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||||
|  |  | ||||||
| 	db := NewDB(xdb) | 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||||
| 	db.RegisterDefaultConverter() |  | ||||||
|  |  | ||||||
| 	_, err := db.Exec(ctx, ` | 	_, err := db.Exec(ctx, ` | ||||||
| 		CREATE TABLE requests (  | 		CREATE TABLE requests (  | ||||||
|   | |||||||
| @@ -36,8 +36,7 @@ func TestTypeConverter1(t *testing.T) { | |||||||
|  |  | ||||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||||
|  |  | ||||||
| 	db := NewDB(xdb) | 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||||
| 	db.RegisterDefaultConverter() |  | ||||||
|  |  | ||||||
| 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{}) | 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
| @@ -71,8 +70,7 @@ func TestTypeConverter2(t *testing.T) { | |||||||
|  |  | ||||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||||
|  |  | ||||||
| 	db := NewDB(xdb) | 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||||
| 	db.RegisterDefaultConverter() |  | ||||||
|  |  | ||||||
| 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{}) | 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
| @@ -116,8 +114,7 @@ func TestTypeConverter3(t *testing.T) { | |||||||
|  |  | ||||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||||
|  |  | ||||||
| 	db := NewDB(xdb) | 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||||
| 	db.RegisterDefaultConverter() |  | ||||||
|  |  | ||||||
| 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NULL, PRIMARY KEY (id) ) STRICT", PP{}) | 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|   | |||||||
| @@ -38,6 +38,13 @@ type fullTypeRef struct { | |||||||
| 	Index          []int | 	Index          []int | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type IColl interface { | ||||||
|  | 	Collection() *mongo.Collection | ||||||
|  | 	Name() string | ||||||
|  | 	Indexes() mongo.IndexView | ||||||
|  | 	Drop(ctx context.Context) error | ||||||
|  | } | ||||||
|  |  | ||||||
| type Coll[TData any] struct { | type Coll[TData any] struct { | ||||||
| 	coll                *mongo.Collection                                        // internal mongo collection, access via Collection() | 	coll                *mongo.Collection                                        // internal mongo collection, access via Collection() | ||||||
| 	dataTypeMap         map[string]fullTypeRef                                   // list of TData fields (only if TData is not an interface) | 	dataTypeMap         map[string]fullTypeRef                                   // list of TData fields (only if TData is not an interface) | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user