Compare commits
	
		
			90 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| d44e971325 | |||
| fe4cdc48af | |||
| 631006a4e1 | |||
| 567ead8697 | |||
| e4886b4a7d | |||
| dcb5d3d7cd | |||
| 15a639f85a | |||
| 303bd04649 | |||
| 7bda674939 | |||
| 126d4fbd0b | |||
| fed8bccaab | |||
| 47b6a6b508 | |||
| 764ce79a71 | |||
| b876c64ba2 | |||
| 8d52b41f57 | |||
| f47e2a33fe | |||
| 9321938dad | |||
| 3828d601a2 | |||
| 2e713c808d | |||
| 6602f86b43 | |||
| 24d9f0fdc7 | |||
| 8446b2da22 | |||
| 758e5a67b5 | |||
| 678ddd7124 | |||
| 36b71dfaf3 | |||
| 9491b72b8d | |||
| 6c4af4006b | |||
| 8bf3a337cf | |||
| 16146494dc | |||
| b0e443ad99 | |||
| 9955eacf96 | |||
| f0347a9435 | |||
| 7c869c65f3 | |||
| 14f39a9162 | |||
| dcd106c1cd | |||
| b704e2a362 | |||
| 6b4bd5a6f8 | |||
| 6df4f5f2a1 | |||
| 780905ba35 | |||
| c679797765 | |||
| 401aad9fa4 | |||
| 645113d553 | |||
| 4a33986b6a | |||
| c1c8c64c76 | |||
| 0927fdc4d7 | |||
| 102a280dda | |||
| f13384d794 | |||
| 409d6e108d | |||
| ed53f297bd | |||
| 42424f4bc2 | |||
| 9e5b8c5277 | |||
| 9abe28c490 | |||
| 422bbd8593 | |||
| 3956675e04 | |||
| 10c3780b52 | |||
| 8edc067a3b | |||
| 1007f2c834 | |||
| c25da03217 | |||
| 4b55dbaacf | |||
| c399fa42ae | |||
| 9e586f7706 | |||
| 3cc8dccc63 | |||
| 7fedfbca81 | |||
| 3c439ba428 | |||
| ad24f6db44 | |||
| 1869ff3d75 | |||
| 30ce8c4b60 | |||
| 885bb53244 | |||
| 1c7dc1820a | |||
| 7e16e799e4 | |||
| 890e16241d | |||
| b9d0348735 | |||
| b9e9575b9b | |||
| 295a098eb4 | |||
| b69a082bb1 | |||
| a4a8c83d17 | |||
| e952176bb0 | |||
| d99adb203b | |||
| f1f91f4cfa | |||
| 2afb265ea4 | |||
| be24f7a190 | |||
| aae8a706e9 | |||
| 7d64f18f54 | |||
| d08b2e565a | |||
| d29e84894d | |||
| 617298c366 | |||
| 668f308565 | |||
| 240a8ed7aa | |||
| 70de8e8d04 | |||
| d38fa60fbc | 
							
								
								
									
										2
									
								
								.idea/.gitignore
									
									
									
										generated
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.idea/.gitignore
									
									
									
										generated
									
									
										vendored
									
									
								
							| @@ -6,3 +6,5 @@ | ||||
| # Datasource local storage ignored files | ||||
| /dataSources/ | ||||
| /dataSources.local.xml | ||||
| # GitHub Copilot persisted chat sessions | ||||
| /copilot/chatSessions | ||||
|   | ||||
| @@ -30,7 +30,7 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"` | ||||
| | confext     | Mike       | Parses environment configuration into structs                                                                 | | ||||
| | cmdext      | Mike       | Runner for external commands/processes                                                                        | | ||||
| |             |            |                                                                                                               | | ||||
| | sq          | Mike       | Utility functions for sql based databases                                                                     | | ||||
| | sq          | Mike       | Utility functions for sql based databases (primarily sqlite)                                                  | | ||||
| | tst         | Mike       | Utility functions for unit tests                                                                              | | ||||
| |             |            |                                                                                                               | | ||||
| | rfctime     | Mike       | Classes for time serialization, with different marshalling methods for mongo and json                         | | ||||
|   | ||||
							
								
								
									
										4
									
								
								TODO.md
									
									
									
									
									
								
							
							
						
						
									
										4
									
								
								TODO.md
									
									
									
									
									
								
							| @@ -2,6 +2,8 @@ | ||||
|  | ||||
|  - cronext | ||||
|  | ||||
|  - rfctime.DateOnly | ||||
|  - rfctime.HMSTimeOnly | ||||
|  - rfctime.NanoTimeOnly | ||||
|  | ||||
|  - remove sqlx dependency from sq  (unmaintained, and mostly superseded by our own stuff?) | ||||
|  - Move DBLogger and DBPreprocessor to sq | ||||
| @@ -26,6 +26,10 @@ type CSIDDef struct { | ||||
| 	Prefix       string | ||||
| } | ||||
|  | ||||
| type CSIDGenOptions struct { | ||||
| 	DebugOutput *bool | ||||
| } | ||||
|  | ||||
| var rexCSIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||
|  | ||||
| var rexCSIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@csid:type)\s+\[(?P<prefix>[A-Z0-9]{3})].*$`)) | ||||
| @@ -35,7 +39,9 @@ var rexCSIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumCharsetIDGen | ||||
| //go:embed csid-generate.template | ||||
| var templateCSIDGenerateText string | ||||
|  | ||||
| func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | ||||
| func GenerateCharsetIDSpecs(sourceDir string, destFile string, opt CSIDGenOptions) error { | ||||
|  | ||||
| 	debugOutput := langext.Coalesce(opt.DebugOutput, false) | ||||
|  | ||||
| 	files, err := os.ReadDir(sourceDir) | ||||
| 	if err != nil { | ||||
| @@ -81,13 +87,18 @@ func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | ||||
| 	pkgname := "" | ||||
|  | ||||
| 	for _, f := range files { | ||||
| 		if debugOutput { | ||||
| 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||
| 		fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name())) | ||||
| 		} | ||||
|  | ||||
| 		fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 		} | ||||
|  | ||||
| 		if debugOutput { | ||||
| 			fmt.Printf("\n") | ||||
| 		} | ||||
|  | ||||
| 		allIDs = append(allIDs, fileIDs...) | ||||
|  | ||||
| @@ -113,7 +124,7 @@ func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) { | ||||
| func processCSIDFile(basedir string, fn string, debugOutput bool) ([]CSIDDef, string, error) { | ||||
| 	file, err := os.Open(fn) | ||||
| 	if err != nil { | ||||
| 		return nil, "", err | ||||
| @@ -155,7 +166,11 @@ func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) { | ||||
| 				Name:         match.GroupByName("name").Value(), | ||||
| 				Prefix:       match.GroupByName("prefix").Value(), | ||||
| 			} | ||||
|  | ||||
| 			if debugOutput { | ||||
| 				fmt.Printf("Found ID definition { '%s' }\n", def.Name) | ||||
| 			} | ||||
|  | ||||
| 			ids = append(ids, def) | ||||
| 		} | ||||
| 	} | ||||
|   | ||||
| @@ -34,10 +34,10 @@ func TestGenerateCSIDSpecs(t *testing.T) { | ||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go") | ||||
| 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go", CSIDGenOptions{DebugOutput: langext.PTrue}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go") | ||||
| 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go", CSIDGenOptions{DebugOutput: langext.PTrue}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	fmt.Println() | ||||
|   | ||||
| @@ -37,6 +37,11 @@ type EnumDef struct { | ||||
| 	Values       []EnumDefVal | ||||
| } | ||||
|  | ||||
| type EnumGenOptions struct { | ||||
| 	DebugOutput *bool | ||||
| 	GoFormat    *bool | ||||
| } | ||||
|  | ||||
| var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||
|  | ||||
| var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | ||||
| @@ -48,7 +53,7 @@ var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerato | ||||
| //go:embed enum-generate.template | ||||
| var templateEnumGenerateText string | ||||
|  | ||||
| func GenerateEnumSpecs(sourceDir string, destFile string) error { | ||||
| func GenerateEnumSpecs(sourceDir string, destFile string, opt EnumGenOptions) error { | ||||
|  | ||||
| 	oldChecksum := "N/A" | ||||
| 	if _, err := os.Stat(destFile); !os.IsNotExist(err) { | ||||
| @@ -61,7 +66,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	gocode, _, changed, err := _generateEnumSpecs(sourceDir, destFile, oldChecksum, true) | ||||
| 	gocode, _, changed, err := _generateEnumSpecs(sourceDir, destFile, oldChecksum, langext.Coalesce(opt.GoFormat, true), langext.Coalesce(opt.DebugOutput, false)) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| @@ -78,7 +83,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, gofmt bool) (string, string, bool, error) { | ||||
| func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, gofmt bool, debugOutput bool) (string, string, bool, error) { | ||||
|  | ||||
| 	files, err := os.ReadDir(sourceDir) | ||||
| 	if err != nil { | ||||
| @@ -113,13 +118,18 @@ func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, g | ||||
| 	pkgname := "" | ||||
|  | ||||
| 	for _, f := range files { | ||||
| 		if debugOutput { | ||||
| 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||
| 		fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name())) | ||||
| 		} | ||||
|  | ||||
| 		fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||
| 		if err != nil { | ||||
| 			return "", "", false, err | ||||
| 		} | ||||
|  | ||||
| 		if debugOutput { | ||||
| 			fmt.Printf("\n") | ||||
| 		} | ||||
|  | ||||
| 		allEnums = append(allEnums, fileEnums...) | ||||
|  | ||||
| @@ -146,7 +156,7 @@ func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, g | ||||
| 	return string(fdata), newChecksum, true, nil | ||||
| } | ||||
|  | ||||
| func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | ||||
| func processEnumFile(basedir string, fn string, debugOutput bool) ([]EnumDef, string, error) { | ||||
| 	file, err := os.Open(fn) | ||||
| 	if err != nil { | ||||
| 		return nil, "", err | ||||
| @@ -190,8 +200,11 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | ||||
| 				Values:       make([]EnumDefVal, 0), | ||||
| 			} | ||||
| 			enums = append(enums, def) | ||||
|  | ||||
| 			if debugOutput { | ||||
| 				fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type) | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		if match, ok := rexEnumValueDef.MatchFirst(line); ok { | ||||
| 			typename := match.GroupByName("type").Value() | ||||
| @@ -230,19 +243,24 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | ||||
| 				if v.EnumTypeName == typename { | ||||
| 					enums[i].Values = append(enums[i].Values, def) | ||||
| 					found = true | ||||
|  | ||||
| 					if debugOutput { | ||||
| 						if def.Description != nil { | ||||
| 							fmt.Printf("Found enum value [%s] for '%s'  ('%s')\n", def.Value, def.VarName, *def.Description) | ||||
| 						} else { | ||||
| 							fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName) | ||||
| 						} | ||||
| 					} | ||||
| 					break | ||||
| 				} | ||||
| 			} | ||||
| 			if !found { | ||||
| 				if debugOutput { | ||||
| 					fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName) | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return enums, pkgname, nil | ||||
| } | ||||
|   | ||||
| @@ -7,6 +7,8 @@ import "gogs.mikescher.com/BlackForestBytes/goext/enums" | ||||
|  | ||||
| const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | ||||
|  | ||||
| {{ $pkgname  := .PkgName }} | ||||
|  | ||||
| {{range .Enums}} | ||||
|  | ||||
| {{ $hasStr   := ( . | hasStr   ) }} | ||||
| @@ -97,6 +99,14 @@ func (e {{.EnumTypeName}}) VarName() string { | ||||
| 	return "" | ||||
| } | ||||
|  | ||||
| func (e {{.EnumTypeName}}) TypeName() string { | ||||
| 	return "{{$typename}}" | ||||
| } | ||||
|  | ||||
| func (e {{.EnumTypeName}}) PackageName() string { | ||||
| 	return "{{$pkgname }}" | ||||
| } | ||||
|  | ||||
| func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue { | ||||
|     {{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}} | ||||
| } | ||||
| @@ -135,3 +145,11 @@ func {{.EnumTypeName}}ValuesDescriptionMeta() []enums.EnumDescriptionMetaValue { | ||||
| {{end}} | ||||
|  | ||||
| {{end}} | ||||
|  | ||||
| // ================================ ================= ================================ | ||||
|  | ||||
| func AllPackageEnums() []enums.Enum { | ||||
|     return []enums.Enum{ {{range .Enums}} | ||||
|         {{ if gt (len .Values) 0 }} {{  $v := index .Values 0 }} {{ $v.VarName}}, {{end}} // {{ .EnumTypeName }} {{end}} | ||||
|     } | ||||
| } | ||||
| @@ -37,10 +37,10 @@ func TestGenerateEnumSpecs(t *testing.T) { | ||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	s1, cs1, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true) | ||||
| 	s1, cs1, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true, true) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	s2, cs2, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true) | ||||
| 	s2, cs2, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true, true) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	tst.AssertEqual(t, cs1, cs2) | ||||
| @@ -76,7 +76,7 @@ func TestGenerateEnumSpecsData(t *testing.T) { | ||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	s1, _, _, err := _generateEnumSpecs(tmpDir, "", "", true) | ||||
| 	s1, _, _, err := _generateEnumSpecs(tmpDir, "", "", true, true) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	fmt.Println() | ||||
|   | ||||
| @@ -25,6 +25,10 @@ type IDDef struct { | ||||
| 	Name         string | ||||
| } | ||||
|  | ||||
| type IDGenOptions struct { | ||||
| 	DebugOutput *bool | ||||
| } | ||||
|  | ||||
| var rexIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||
|  | ||||
| var rexIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@id:type).*$`)) | ||||
| @@ -34,7 +38,9 @@ var rexIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumIDGenerator = | ||||
| //go:embed id-generate.template | ||||
| var templateIDGenerateText string | ||||
|  | ||||
| func GenerateIDSpecs(sourceDir string, destFile string) error { | ||||
| func GenerateIDSpecs(sourceDir string, destFile string, opt IDGenOptions) error { | ||||
|  | ||||
| 	debugOutput := langext.Coalesce(opt.DebugOutput, false) | ||||
|  | ||||
| 	files, err := os.ReadDir(sourceDir) | ||||
| 	if err != nil { | ||||
| @@ -80,13 +86,18 @@ func GenerateIDSpecs(sourceDir string, destFile string) error { | ||||
| 	pkgname := "" | ||||
|  | ||||
| 	for _, f := range files { | ||||
| 		if debugOutput { | ||||
| 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||
| 		fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name())) | ||||
| 		} | ||||
|  | ||||
| 		fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 		} | ||||
|  | ||||
| 		if debugOutput { | ||||
| 			fmt.Printf("\n") | ||||
| 		} | ||||
|  | ||||
| 		allIDs = append(allIDs, fileIDs...) | ||||
|  | ||||
| @@ -112,7 +123,7 @@ func GenerateIDSpecs(sourceDir string, destFile string) error { | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func processIDFile(basedir string, fn string) ([]IDDef, string, error) { | ||||
| func processIDFile(basedir string, fn string, debugOutput bool) ([]IDDef, string, error) { | ||||
| 	file, err := os.Open(fn) | ||||
| 	if err != nil { | ||||
| 		return nil, "", err | ||||
| @@ -153,7 +164,11 @@ func processIDFile(basedir string, fn string) ([]IDDef, string, error) { | ||||
| 				FileRelative: rfp, | ||||
| 				Name:         match.GroupByName("name").Value(), | ||||
| 			} | ||||
|  | ||||
| 			if debugOutput { | ||||
| 				fmt.Printf("Found ID definition { '%s' }\n", def.Name) | ||||
| 			} | ||||
|  | ||||
| 			ids = append(ids, def) | ||||
| 		} | ||||
| 	} | ||||
|   | ||||
| @@ -34,10 +34,10 @@ func TestGenerateIDSpecs(t *testing.T) { | ||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go") | ||||
| 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go", IDGenOptions{DebugOutput: langext.PTrue}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go") | ||||
| 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go", IDGenOptions{DebugOutput: langext.PTrue}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	fmt.Println() | ||||
|   | ||||
							
								
								
									
										263
									
								
								cryptext/pronouncablePassword.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										263
									
								
								cryptext/pronouncablePassword.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,263 @@ | ||||
| package cryptext | ||||
|  | ||||
| import ( | ||||
| 	"crypto/rand" | ||||
| 	"io" | ||||
| 	"math/big" | ||||
| 	mathrand "math/rand" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| const ( | ||||
| 	ppStartChar            = "BCDFGHJKLMNPQRSTVWXZ" | ||||
| 	ppEndChar              = "ABDEFIKMNORSTUXYZ" | ||||
| 	ppVowel                = "AEIOUY" | ||||
| 	ppConsonant            = "BCDFGHJKLMNPQRSTVWXZ" | ||||
| 	ppSegmentLenMin        = 3 | ||||
| 	ppSegmentLenMax        = 7 | ||||
| 	ppMaxRepeatedVowel     = 2 | ||||
| 	ppMaxRepeatedConsonant = 2 | ||||
| ) | ||||
|  | ||||
| var ppContinuation = map[uint8]string{ | ||||
| 	'A': "BCDFGHJKLMNPRSTVWXYZ", | ||||
| 	'B': "ADFIKLMNORSTUY", | ||||
| 	'C': "AEIKOUY", | ||||
| 	'D': "AEILORSUYZ", | ||||
| 	'E': "BCDFGHJKLMNPRSTVWXYZ", | ||||
| 	'F': "ADEGIKLOPRTUY", | ||||
| 	'G': "ABDEFHILMNORSTUY", | ||||
| 	'H': "AEIOUY", | ||||
| 	'I': "BCDFGHJKLMNPRSTVWXZ", | ||||
| 	'J': "AEIOUY", | ||||
| 	'K': "ADEFHILMNORSTUY", | ||||
| 	'L': "ADEFGIJKMNOPSTUVWYZ", | ||||
| 	'M': "ABEFIKOPSTUY", | ||||
| 	'N': "ABEFIKOPSTUY", | ||||
| 	'O': "BCDFGHJKLMNPRSTVWXYZ", | ||||
| 	'P': "AEFIJLORSTUY", | ||||
| 	'Q': "AEIOUY", | ||||
| 	'R': "ADEFGHIJKLMNOPSTUVYZ", | ||||
| 	'S': "ACDEIKLOPTUYZ", | ||||
| 	'T': "AEHIJOPRSUWY", | ||||
| 	'U': "BCDFGHJKLMNPRSTVWXZ", | ||||
| 	'V': "AEIOUY", | ||||
| 	'W': "AEIOUY", | ||||
| 	'X': "AEIOUY", | ||||
| 	'Y': "ABCDFGHKLMNPRSTVXZ", | ||||
| 	'Z': "AEILOTUY", | ||||
| } | ||||
|  | ||||
| var ppLog2Map = map[int]float64{ | ||||
| 	1:  0.00000000, | ||||
| 	2:  1.00000000, | ||||
| 	3:  1.58496250, | ||||
| 	4:  2.00000000, | ||||
| 	5:  2.32192809, | ||||
| 	6:  2.58496250, | ||||
| 	7:  2.80735492, | ||||
| 	8:  3.00000000, | ||||
| 	9:  3.16992500, | ||||
| 	10: 3.32192809, | ||||
| 	11: 3.45943162, | ||||
| 	12: 3.58496250, | ||||
| 	13: 3.70043972, | ||||
| 	14: 3.80735492, | ||||
| 	15: 3.90689060, | ||||
| 	16: 4.00000000, | ||||
| 	17: 4.08746284, | ||||
| 	18: 4.16992500, | ||||
| 	19: 4.24792751, | ||||
| 	20: 4.32192809, | ||||
| 	21: 4.39231742, | ||||
| 	22: 4.45943162, | ||||
| 	23: 4.52356196, | ||||
| 	24: 4.58496250, | ||||
| 	25: 4.64385619, | ||||
| 	26: 4.70043972, | ||||
| 	27: 4.75488750, | ||||
| 	28: 4.80735492, | ||||
| 	29: 4.85798100, | ||||
| 	30: 4.90689060, | ||||
| 	31: 4.95419631, | ||||
| 	32: 5.00000000, | ||||
| } | ||||
|  | ||||
| var ( | ||||
| 	ppVowelMap     = ppMakeSet(ppVowel) | ||||
| 	ppConsonantMap = ppMakeSet(ppConsonant) | ||||
| 	ppEndCharMap   = ppMakeSet(ppEndChar) | ||||
| ) | ||||
|  | ||||
| func ppMakeSet(v string) map[uint8]bool { | ||||
| 	mp := make(map[uint8]bool, len(v)) | ||||
| 	for _, chr := range v { | ||||
| 		mp[uint8(chr)] = true | ||||
| 	} | ||||
| 	return mp | ||||
| } | ||||
|  | ||||
| func ppRandInt(rng io.Reader, max int) int { | ||||
| 	v, err := rand.Int(rng, big.NewInt(int64(max))) | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
| 	return int(v.Int64()) | ||||
| } | ||||
|  | ||||
| func ppRand(rng io.Reader, chars string, entropy *float64) uint8 { | ||||
| 	chr := chars[ppRandInt(rng, len(chars))] | ||||
|  | ||||
| 	*entropy = *entropy + ppLog2Map[len(chars)] | ||||
|  | ||||
| 	return chr | ||||
| } | ||||
|  | ||||
| func ppCharType(chr uint8) (bool, bool) { | ||||
| 	_, ok1 := ppVowelMap[chr] | ||||
| 	_, ok2 := ppConsonantMap[chr] | ||||
|  | ||||
| 	return ok1, ok2 | ||||
| } | ||||
|  | ||||
| func ppCharsetRemove(cs string, set map[uint8]bool, allowEmpty bool) string { | ||||
| 	result := "" | ||||
| 	for _, chr := range cs { | ||||
| 		if _, ok := set[uint8(chr)]; !ok { | ||||
| 			result += string(chr) | ||||
| 		} | ||||
| 	} | ||||
| 	if result == "" && !allowEmpty { | ||||
| 		return cs | ||||
| 	} | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| func ppCharsetFilter(cs string, set map[uint8]bool, allowEmpty bool) string { | ||||
| 	result := "" | ||||
| 	for _, chr := range cs { | ||||
| 		if _, ok := set[uint8(chr)]; ok { | ||||
| 			result += string(chr) | ||||
| 		} | ||||
| 	} | ||||
| 	if result == "" && !allowEmpty { | ||||
| 		return cs | ||||
| 	} | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| func PronouncablePasswordExt(rng io.Reader, pwlen int) (string, float64) { | ||||
|  | ||||
| 	// kinda pseudo markov-chain - with a few extra rules and no weights... | ||||
|  | ||||
| 	if pwlen <= 0 { | ||||
| 		return "", 0 | ||||
| 	} | ||||
|  | ||||
| 	vowelCount := 0 | ||||
| 	consoCount := 0 | ||||
| 	entropy := float64(0) | ||||
|  | ||||
| 	startChar := ppRand(rng, ppStartChar, &entropy) | ||||
|  | ||||
| 	result := string(startChar) | ||||
| 	currentChar := startChar | ||||
|  | ||||
| 	isVowel, isConsonant := ppCharType(currentChar) | ||||
| 	if isVowel { | ||||
| 		vowelCount = 1 | ||||
| 	} | ||||
| 	if isConsonant { | ||||
| 		consoCount = ppMaxRepeatedConsonant | ||||
| 	} | ||||
|  | ||||
| 	segmentLen := 1 | ||||
|  | ||||
| 	segmentLenTarget := ppSegmentLenMin + ppRandInt(rng, ppSegmentLenMax-ppSegmentLenMin) | ||||
|  | ||||
| 	for len(result) < pwlen { | ||||
|  | ||||
| 		charset := ppContinuation[currentChar] | ||||
| 		if vowelCount >= ppMaxRepeatedVowel { | ||||
| 			charset = ppCharsetRemove(charset, ppVowelMap, false) | ||||
| 		} | ||||
| 		if consoCount >= ppMaxRepeatedConsonant { | ||||
| 			charset = ppCharsetRemove(charset, ppConsonantMap, false) | ||||
| 		} | ||||
|  | ||||
| 		lastOfSegment := false | ||||
| 		newSegment := false | ||||
|  | ||||
| 		if len(result)+1 == pwlen { | ||||
| 			// last of result | ||||
| 			charset = ppCharsetFilter(charset, ppEndCharMap, false) | ||||
| 		} else if segmentLen+1 == segmentLenTarget { | ||||
| 			// last of segment | ||||
| 			charsetNew := ppCharsetFilter(charset, ppEndCharMap, true) | ||||
| 			if charsetNew != "" { | ||||
| 				charset = charsetNew | ||||
| 				lastOfSegment = true | ||||
| 			} | ||||
| 		} else if segmentLen >= segmentLenTarget { | ||||
| 			// (perhaps) start of new segment | ||||
| 			if _, ok := ppEndCharMap[currentChar]; ok { | ||||
| 				charset = ppStartChar | ||||
| 				newSegment = true | ||||
| 			} else { | ||||
| 				// continue segment for one more char to (hopefully) find an end-char | ||||
| 				charsetNew := ppCharsetFilter(charset, ppEndCharMap, true) | ||||
| 				if charsetNew != "" { | ||||
| 					charset = charsetNew | ||||
| 					lastOfSegment = true | ||||
| 				} | ||||
| 			} | ||||
| 		} else { | ||||
| 			// normal continuation | ||||
| 		} | ||||
|  | ||||
| 		newChar := ppRand(rng, charset, &entropy) | ||||
| 		if lastOfSegment { | ||||
| 			currentChar = newChar | ||||
| 			segmentLen++ | ||||
| 			result += strings.ToLower(string(newChar)) | ||||
| 		} else if newSegment { | ||||
| 			currentChar = newChar | ||||
| 			segmentLen = 1 | ||||
| 			result += strings.ToUpper(string(newChar)) | ||||
| 			segmentLenTarget = ppSegmentLenMin + ppRandInt(rng, ppSegmentLenMax-ppSegmentLenMin) | ||||
| 			vowelCount = 0 | ||||
| 			consoCount = 0 | ||||
| 		} else { | ||||
| 			currentChar = newChar | ||||
| 			segmentLen++ | ||||
| 			result += strings.ToLower(string(newChar)) | ||||
| 		} | ||||
|  | ||||
| 		isVowel, isConsonant := ppCharType(currentChar) | ||||
| 		if isVowel { | ||||
| 			vowelCount++ | ||||
| 			consoCount = 0 | ||||
| 		} | ||||
| 		if isConsonant { | ||||
| 			vowelCount = 0 | ||||
| 			if newSegment { | ||||
| 				consoCount = ppMaxRepeatedConsonant | ||||
| 			} else { | ||||
| 				consoCount++ | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return result, entropy | ||||
| } | ||||
|  | ||||
| func PronouncablePassword(len int) string { | ||||
| 	v, _ := PronouncablePasswordExt(rand.Reader, len) | ||||
| 	return v | ||||
| } | ||||
|  | ||||
| func PronouncablePasswordSeeded(seed int64, len int) string { | ||||
|  | ||||
| 	v, _ := PronouncablePasswordExt(mathrand.New(mathrand.NewSource(seed)), len) | ||||
| 	return v | ||||
| } | ||||
							
								
								
									
										35
									
								
								cryptext/pronouncablePassword_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										35
									
								
								cryptext/pronouncablePassword_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,35 @@ | ||||
| package cryptext | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"math/rand" | ||||
| 	"testing" | ||||
| ) | ||||
|  | ||||
| func TestPronouncablePasswordExt(t *testing.T) { | ||||
| 	for i := 0; i < 20; i++ { | ||||
| 		pw, entropy := PronouncablePasswordExt(rand.New(rand.NewSource(int64(i))), 16) | ||||
| 		fmt.Printf("[%.2f] => %s\n", entropy, pw) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPronouncablePasswordSeeded(t *testing.T) { | ||||
| 	for i := 0; i < 20; i++ { | ||||
| 		pw := PronouncablePasswordSeeded(int64(i), 8) | ||||
| 		fmt.Printf("%s\n", pw) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPronouncablePassword(t *testing.T) { | ||||
| 	for i := 0; i < 20; i++ { | ||||
| 		pw := PronouncablePassword(i + 1) | ||||
| 		fmt.Printf("%s\n", pw) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPronouncablePasswordWrongLen(t *testing.T) { | ||||
| 	PronouncablePassword(0) | ||||
| 	PronouncablePassword(-1) | ||||
| 	PronouncablePassword(-2) | ||||
| 	PronouncablePassword(-3) | ||||
| } | ||||
							
								
								
									
										254
									
								
								dataext/casMutex.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										254
									
								
								dataext/casMutex.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,254 @@ | ||||
| package dataext | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"golang.org/x/sync/semaphore" | ||||
| 	"runtime" | ||||
| 	"sync" | ||||
| 	"sync/atomic" | ||||
| 	"time" | ||||
| 	"unsafe" | ||||
| ) | ||||
|  | ||||
| // from https://github.com/viney-shih/go-lock/blob/2f19fd8ce335e33e0ab9dccb1ff2ce820c3da332/cas.go | ||||
|  | ||||
| // CASMutex is the struct implementing RWMutex with CAS mechanism. | ||||
| type CASMutex struct { | ||||
| 	state     casState | ||||
| 	turnstile *semaphore.Weighted | ||||
|  | ||||
| 	broadcastChan chan struct{} | ||||
| 	broadcastMut  sync.RWMutex | ||||
| } | ||||
|  | ||||
| func NewCASMutex() *CASMutex { | ||||
| 	return &CASMutex{ | ||||
| 		state:         casStateNoLock, | ||||
| 		turnstile:     semaphore.NewWeighted(1), | ||||
| 		broadcastChan: make(chan struct{}), | ||||
| 	} | ||||
| } | ||||
|  | ||||
| type casState int32 | ||||
|  | ||||
| const ( | ||||
| 	casStateUndefined casState = iota - 2 // -2 | ||||
| 	casStateWriteLock                     // -1 | ||||
| 	casStateNoLock                        // 0 | ||||
| 	casStateReadLock                      // >= 1 | ||||
| ) | ||||
|  | ||||
| func (m *CASMutex) getState(n int32) casState { | ||||
| 	switch st := casState(n); { | ||||
| 	case st == casStateWriteLock: | ||||
| 		fallthrough | ||||
| 	case st == casStateNoLock: | ||||
| 		return st | ||||
| 	case st >= casStateReadLock: | ||||
| 		return casStateReadLock | ||||
| 	default: | ||||
| 		// actually, it should not happened. | ||||
| 		return casStateUndefined | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (m *CASMutex) listen() <-chan struct{} { | ||||
| 	m.broadcastMut.RLock() | ||||
| 	defer m.broadcastMut.RUnlock() | ||||
|  | ||||
| 	return m.broadcastChan | ||||
| } | ||||
|  | ||||
| func (m *CASMutex) broadcast() { | ||||
| 	newCh := make(chan struct{}) | ||||
|  | ||||
| 	m.broadcastMut.Lock() | ||||
| 	ch := m.broadcastChan | ||||
| 	m.broadcastChan = newCh | ||||
| 	m.broadcastMut.Unlock() | ||||
|  | ||||
| 	close(ch) | ||||
| } | ||||
|  | ||||
| func (m *CASMutex) tryLock(ctx context.Context) bool { | ||||
| 	for { | ||||
| 		broker := m.listen() | ||||
| 		if atomic.CompareAndSwapInt32( | ||||
| 			(*int32)(unsafe.Pointer(&m.state)), | ||||
| 			int32(casStateNoLock), | ||||
| 			int32(casStateWriteLock), | ||||
| 		) { | ||||
| 			return true | ||||
| 		} | ||||
|  | ||||
| 		if ctx == nil { | ||||
| 			return false | ||||
| 		} | ||||
|  | ||||
| 		select { | ||||
| 		case <-ctx.Done(): | ||||
| 			// timeout or cancellation | ||||
| 			return false | ||||
| 		case <-broker: | ||||
| 			// waiting for signal triggered by m.broadcast() and trying again. | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // TryLockWithContext attempts to acquire the lock, blocking until resources | ||||
| // are available or ctx is done (timeout or cancellation). | ||||
| func (m *CASMutex) TryLockWithContext(ctx context.Context) bool { | ||||
| 	if err := m.turnstile.Acquire(ctx, 1); err != nil { | ||||
| 		// Acquire failed due to timeout or cancellation | ||||
| 		return false | ||||
| 	} | ||||
|  | ||||
| 	defer m.turnstile.Release(1) | ||||
|  | ||||
| 	return m.tryLock(ctx) | ||||
| } | ||||
|  | ||||
| // Lock acquires the lock. | ||||
| // If it is currently held by others, Lock will wait until it has a chance to acquire it. | ||||
| func (m *CASMutex) Lock() { | ||||
| 	ctx := context.Background() | ||||
|  | ||||
| 	m.TryLockWithContext(ctx) | ||||
| } | ||||
|  | ||||
// TryLock attempts to acquire the lock without blocking.
// Return false if someone is holding it now.
func (m *CASMutex) TryLock() bool {
	// Non-blocking pass through the turnstile: if attempts are queued there,
	// give up immediately.
	if !m.turnstile.TryAcquire(1) {
		return false
	}

	defer m.turnstile.Release(1)

	// nil context => tryLock performs exactly one CAS attempt.
	return m.tryLock(nil)
}
|  | ||||
| // TryLockWithTimeout attempts to acquire the lock within a period of time. | ||||
| // Return false if spending time is more than duration and no chance to acquire it. | ||||
| func (m *CASMutex) TryLockWithTimeout(duration time.Duration) bool { | ||||
| 	ctx, cancel := context.WithTimeout(context.Background(), duration) | ||||
| 	defer cancel() | ||||
|  | ||||
| 	return m.TryLockWithContext(ctx) | ||||
| } | ||||
|  | ||||
// Unlock releases the (write) lock.
// It panics if the mutex is not currently write-locked, which indicates an
// Unlock without a matching successful Lock/TryLock.
func (m *CASMutex) Unlock() {
	if ok := atomic.CompareAndSwapInt32(
		(*int32)(unsafe.Pointer(&m.state)),
		int32(casStateWriteLock),
		int32(casStateNoLock),
	); !ok {
		panic("Unlock failed")
	}

	// Wake all goroutines parked on the broadcast channel so they can retry.
	m.broadcast()
}
|  | ||||
// rTryLock attempts to take a read lock by incrementing the reader count
// encoded in m.state via CAS on a snapshot.
//
// With a nil ctx it makes one attempt and fails fast. Otherwise it retries;
// the first select (with default) distinguishes WHY the attempt failed:
//   - state was readable (NoLock/ReadLock) but the CAS lost a race with a
//     concurrent reader => retry immediately after yielding, no need to wait
//     for a broadcast;
//   - state was write-locked => fall through and block on the broadcast
//     channel (subscribed before the state load, so a wakeup between the
//     load and the wait is not lost).
func (m *CASMutex) rTryLock(ctx context.Context) bool {
	for {
		broker := m.listen()
		n := atomic.LoadInt32((*int32)(unsafe.Pointer(&m.state)))
		st := m.getState(n)
		switch st {
		case casStateNoLock, casStateReadLock:
			// Admit this reader: bump the reader count on the snapshot n.
			if atomic.CompareAndSwapInt32((*int32)(unsafe.Pointer(&m.state)), n, n+1) {
				return true
			}
		}

		if ctx == nil {
			return false
		}

		select {
		case <-ctx.Done():
			// timeout or cancellation
			return false
		default:
			switch st {
			// read-lock failed due to concurrence issue, try again immediately
			case casStateNoLock, casStateReadLock:
				runtime.Gosched() // allow other goroutines to do stuff.
				continue
			}
		}

		select {
		case <-ctx.Done():
			// timeout or cancellation
			return false
		case <-broker:
			// waiting for signal triggered by m.broadcast() and trying again.
		}
	}
}
|  | ||||
// RTryLockWithContext attempts to acquire the read lock, blocking until resources
// are available or ctx is done (timeout or cancellation).
func (m *CASMutex) RTryLockWithContext(ctx context.Context) bool {
	if err := m.turnstile.Acquire(ctx, 1); err != nil {
		// Acquire failed due to timeout or cancellation
		return false
	}

	// Released immediately (not deferred, unlike TryLockWithContext):
	// NOTE(review): this looks like the classic turnstile pattern — readers
	// pass through the gate and release it right away so they do not block
	// each other, while queued writers can still close the gate. Confirm this
	// asymmetry is intentional before changing it.
	m.turnstile.Release(1)

	return m.rTryLock(ctx)
}
|  | ||||
| // RLock acquires the read lock. | ||||
| // If it is currently held by others writing, RLock will wait until it has a chance to acquire it. | ||||
| func (m *CASMutex) RLock() { | ||||
| 	ctx := context.Background() | ||||
|  | ||||
| 	m.RTryLockWithContext(ctx) | ||||
| } | ||||
|  | ||||
// RTryLock attempts to acquire the read lock without blocking.
// Return false if someone is writing it now.
func (m *CASMutex) RTryLock() bool {
	// Non-blocking pass through the turnstile gate.
	if !m.turnstile.TryAcquire(1) {
		return false
	}

	// Released immediately (reader turnstile pattern; see RTryLockWithContext).
	m.turnstile.Release(1)

	// nil context => rTryLock performs exactly one attempt.
	return m.rTryLock(nil)
}
|  | ||||
| // RTryLockWithTimeout attempts to acquire the read lock within a period of time. | ||||
| // Return false if spending time is more than duration and no chance to acquire it. | ||||
| func (m *CASMutex) RTryLockWithTimeout(duration time.Duration) bool { | ||||
| 	ctx, cancel := context.WithTimeout(context.Background(), duration) | ||||
| 	defer cancel() | ||||
|  | ||||
| 	return m.RTryLockWithContext(ctx) | ||||
| } | ||||
|  | ||||
// RUnlock releases the read lock.
// It panics if the mutex is not currently read-locked (decrement underflows
// or lands in a write-locked state), i.e. an RUnlock without a matching RLock.
func (m *CASMutex) RUnlock() {
	// Decrement the reader count encoded in m.state.
	n := atomic.AddInt32((*int32)(unsafe.Pointer(&m.state)), -1)
	switch m.getState(n) {
	case casStateUndefined, casStateWriteLock:
		panic("RUnlock failed")
	case casStateNoLock:
		// Last reader left: wake goroutines waiting to acquire.
		m.broadcast()
	}
}
|  | ||||
// RLocker returns a Locker interface that implements the Lock and Unlock methods
// by calling CASMutex.RLock and CASMutex.RUnlock.
func (m *CASMutex) RLocker() sync.Locker {
	return (*rlocker)(m)
}

// rlocker adapts a CASMutex to sync.Locker via its read-lock methods
// (mirrors the sync.RWMutex.RLocker pattern from the standard library).
type rlocker CASMutex

func (r *rlocker) Lock()   { (*CASMutex)(r).RLock() }
func (r *rlocker) Unlock() { (*CASMutex)(r).RUnlock() }
							
								
								
									
										59
									
								
								dataext/optional.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										59
									
								
								dataext/optional.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,59 @@ | ||||
| package dataext | ||||
|  | ||||
| import ( | ||||
| 	"encoding/json" | ||||
| 	"errors" | ||||
| ) | ||||
|  | ||||
// JsonOpt is an optional JSON value: it distinguishes "field was never set"
// from every real value of T, including T's zero value. The zero JsonOpt is
// unset and ready to use.
type JsonOpt[T any] struct {
	isSet bool
	value T
}

// MarshalJSON encodes the contained value, or "null" when unset.
func (m JsonOpt[T]) MarshalJSON() ([]byte, error) {
	if m.isSet {
		return json.Marshal(m.value)
	}
	return []byte("null"), nil // actually this would be undefined - but undefined is not valid JSON
}

// UnmarshalJSON decodes data into the option and marks it as set.
func (m *JsonOpt[T]) UnmarshalJSON(data []byte) error {
	if m == nil {
		return errors.New("JsonOpt: UnmarshalJSON on nil pointer")
	}

	m.isSet = true
	return json.Unmarshal(data, &m.value)
}

// IsSet reports whether a value has been assigned.
func (m JsonOpt[T]) IsSet() bool {
	return m.isSet
}

// IsUnset reports whether no value has been assigned.
func (m JsonOpt[T]) IsUnset() bool {
	return !m.isSet
}

// Value returns the contained value and true, or T's zero value and false.
func (m JsonOpt[T]) Value() (T, bool) {
	if m.isSet {
		return m.value, true
	}
	var zero T
	return zero, false
}

// ValueOrNil returns a pointer to a copy of the contained value, or nil when unset.
func (m JsonOpt[T]) ValueOrNil() *T {
	if m.isSet {
		return &m.value
	}
	return nil
}

// MustValue returns the contained value; it panics when unset.
func (m JsonOpt[T]) MustValue() T {
	if m.isSet {
		return m.value
	}
	panic("value not set")
}
							
								
								
									
										163
									
								
								dataext/syncMap.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										163
									
								
								dataext/syncMap.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,163 @@ | ||||
| package dataext | ||||
|  | ||||
| import "sync" | ||||
|  | ||||
// SyncMap is a mutex-guarded generic map. The zero value is ready to use;
// the underlying map is allocated lazily on the first write.
//
// Note: reading from, ranging over, or deleting from a nil Go map is safe,
// so only the write paths allocate — the read-only methods deliberately
// leave s.data nil on an empty map (avoids a pointless allocation per call).
type SyncMap[TKey comparable, TData any] struct {
	data map[TKey]TData
	lock sync.Mutex
}

// ensureInit lazily allocates the underlying map. Caller must hold s.lock.
func (s *SyncMap[TKey, TData]) ensureInit() {
	if s.data == nil {
		s.data = make(map[TKey]TData)
	}
}

// Set stores data under key, overwriting any existing entry.
func (s *SyncMap[TKey, TData]) Set(key TKey, data TData) {
	s.lock.Lock()
	defer s.lock.Unlock()

	s.ensureInit()

	s.data[key] = data
}

// SetIfNotContains stores data under key only if the key is absent.
// It returns true if the value was inserted.
func (s *SyncMap[TKey, TData]) SetIfNotContains(key TKey, data TData) bool {
	s.lock.Lock()
	defer s.lock.Unlock()

	s.ensureInit()

	if _, existsInPreState := s.data[key]; existsInPreState {
		return false
	}

	s.data[key] = data

	return true
}

// SetIfNotContainsFunc is like SetIfNotContains, but the value is produced
// lazily by data() — and only when the key is absent. data() runs while the
// lock is held, so it must not call back into this SyncMap.
func (s *SyncMap[TKey, TData]) SetIfNotContainsFunc(key TKey, data func() TData) bool {
	s.lock.Lock()
	defer s.lock.Unlock()

	s.ensureInit()

	if _, existsInPreState := s.data[key]; existsInPreState {
		return false
	}

	s.data[key] = data()

	return true
}

// Get returns the value stored under key and whether the key exists.
func (s *SyncMap[TKey, TData]) Get(key TKey) (TData, bool) {
	s.lock.Lock()
	defer s.lock.Unlock()

	if v, ok := s.data[key]; ok {
		return v, true
	}

	var zero TData
	return zero, false
}

// GetAndSetIfNotContains returns the existing value for key, or stores and
// returns data if the key was absent.
func (s *SyncMap[TKey, TData]) GetAndSetIfNotContains(key TKey, data TData) TData {
	s.lock.Lock()
	defer s.lock.Unlock()

	s.ensureInit()

	if v, ok := s.data[key]; ok {
		return v
	}

	s.data[key] = data
	return data
}

// GetAndSetIfNotContainsFunc returns the existing value for key, or stores
// and returns the result of data() if the key was absent. data() runs while
// the lock is held, so it must not call back into this SyncMap.
func (s *SyncMap[TKey, TData]) GetAndSetIfNotContainsFunc(key TKey, data func() TData) TData {
	s.lock.Lock()
	defer s.lock.Unlock()

	s.ensureInit()

	if v, ok := s.data[key]; ok {
		return v
	}

	dataObj := data()
	s.data[key] = dataObj
	return dataObj
}

// Delete removes key and reports whether it was present.
func (s *SyncMap[TKey, TData]) Delete(key TKey) bool {
	s.lock.Lock()
	defer s.lock.Unlock()

	_, ok := s.data[key]

	delete(s.data, key) // no-op on a nil map

	return ok
}

// Contains reports whether key is present.
func (s *SyncMap[TKey, TData]) Contains(key TKey) bool {
	s.lock.Lock()
	defer s.lock.Unlock()

	_, ok := s.data[key]

	return ok
}

// GetAllKeys returns a snapshot of all keys (in unspecified order).
func (s *SyncMap[TKey, TData]) GetAllKeys() []TKey {
	s.lock.Lock()
	defer s.lock.Unlock()

	r := make([]TKey, 0, len(s.data))

	for k := range s.data {
		r = append(r, k)
	}

	return r
}

// GetAllValues returns a snapshot of all values (in unspecified order).
func (s *SyncMap[TKey, TData]) GetAllValues() []TData {
	s.lock.Lock()
	defer s.lock.Unlock()

	r := make([]TData, 0, len(s.data))

	for _, v := range s.data {
		r = append(r, v)
	}

	return r
}
| @@ -5,6 +5,8 @@ type Enum interface { | ||||
| 	ValuesAny() []any | ||||
| 	ValuesMeta() []EnumMetaValue | ||||
| 	VarName() string | ||||
| 	TypeName() string | ||||
| 	PackageName() string | ||||
| } | ||||
|  | ||||
| type StringEnum interface { | ||||
|   | ||||
| @@ -68,8 +68,10 @@ func init() { | ||||
| } | ||||
|  | ||||
| type Builder struct { | ||||
| 	wrappedErr          error | ||||
| 	errorData           *ExErr | ||||
| 	containsGinData     bool | ||||
| 	containsContextData bool | ||||
| 	noLog               bool | ||||
| } | ||||
|  | ||||
| @@ -89,9 +91,9 @@ func Wrap(err error, msg string) *Builder { | ||||
| 	if !pkgconfig.RecursiveErrors { | ||||
| 		v := FromError(err) | ||||
| 		v.Message = msg | ||||
| 		return &Builder{errorData: v} | ||||
| 		return &Builder{wrappedErr: err, errorData: v} | ||||
| 	} | ||||
| 	return &Builder{errorData: wrapExErr(FromError(err), msg, CatWrap, 1)} | ||||
| 	return &Builder{wrappedErr: err, errorData: wrapExErr(FromError(err), msg, CatWrap, 1)} | ||||
| } | ||||
|  | ||||
| // ---------------------------------------------------------------------------- | ||||
| @@ -308,27 +310,27 @@ func (b *Builder) Errs(key string, val []error) *Builder { | ||||
| func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) *Builder { | ||||
| 	if v := ctx.Value("start_timestamp"); v != nil { | ||||
| 		if t, ok := v.(time.Time); ok { | ||||
| 			b.Time("ctx.startTimestamp", t) | ||||
| 			b.Time("ctx.endTimestamp", time.Now()) | ||||
| 			b.Time("ctx_startTimestamp", t) | ||||
| 			b.Time("ctx_endTimestamp", time.Now()) | ||||
| 		} | ||||
| 	} | ||||
| 	b.Str("gin.method", req.Method) | ||||
| 	b.Str("gin.path", g.FullPath()) | ||||
| 	b.Strs("gin.header", extractHeader(g.Request.Header)) | ||||
| 	b.Str("gin_method", req.Method) | ||||
| 	b.Str("gin_path", g.FullPath()) | ||||
| 	b.Strs("gin_header", extractHeader(g.Request.Header)) | ||||
| 	if req.URL != nil { | ||||
| 		b.Str("gin.url", req.URL.String()) | ||||
| 		b.Str("gin_url", req.URL.String()) | ||||
| 	} | ||||
| 	if ctxVal := g.GetString("apiversion"); ctxVal != "" { | ||||
| 		b.Str("gin.context.apiversion", ctxVal) | ||||
| 		b.Str("gin_context_apiversion", ctxVal) | ||||
| 	} | ||||
| 	if ctxVal := g.GetString("uid"); ctxVal != "" { | ||||
| 		b.Str("gin.context.uid", ctxVal) | ||||
| 		b.Str("gin_context_uid", ctxVal) | ||||
| 	} | ||||
| 	if ctxVal := g.GetString("fcmId"); ctxVal != "" { | ||||
| 		b.Str("gin.context.fcmid", ctxVal) | ||||
| 		b.Str("gin_context_fcmid", ctxVal) | ||||
| 	} | ||||
| 	if ctxVal := g.GetString("reqid"); ctxVal != "" { | ||||
| 		b.Str("gin.context.reqid", ctxVal) | ||||
| 		b.Str("gin_context_reqid", ctxVal) | ||||
| 	} | ||||
| 	if req.Method != "GET" && req.Body != nil { | ||||
|  | ||||
| @@ -339,12 +341,12 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) | ||||
| 						var prettyJSON bytes.Buffer | ||||
| 						err = json.Indent(&prettyJSON, bin, "", "  ") | ||||
| 						if err == nil { | ||||
| 							b.Str("gin.body", string(prettyJSON.Bytes())) | ||||
| 							b.Str("gin_body", string(prettyJSON.Bytes())) | ||||
| 						} else { | ||||
| 							b.Bytes("gin.body", bin) | ||||
| 							b.Bytes("gin_body", bin) | ||||
| 						} | ||||
| 					} else { | ||||
| 						b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type"))) | ||||
| 						b.Str("gin_body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type"))) | ||||
| 					} | ||||
| 				} | ||||
| 			} | ||||
| @@ -354,9 +356,9 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) | ||||
| 			if brc, ok := req.Body.(dataext.BufferedReadCloser); ok { | ||||
| 				if bin, err := brc.BufferedAll(); err == nil { | ||||
| 					if len(bin) < 16*1024 { | ||||
| 						b.Bytes("gin.body", bin) | ||||
| 						b.Bytes("gin_body", bin) | ||||
| 					} else { | ||||
| 						b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type"))) | ||||
| 						b.Str("gin_body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type"))) | ||||
| 					} | ||||
| 				} | ||||
| 			} | ||||
| @@ -364,10 +366,20 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	pkgconfig.ExtendGinMeta(ctx, b, g, req) | ||||
|  | ||||
| 	b.containsGinData = true | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func (b *Builder) CtxData(method Method, ctx context.Context) *Builder { | ||||
| 	pkgconfig.ExtendContextMeta(b, method, ctx) | ||||
|  | ||||
| 	b.containsContextData = true | ||||
|  | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func formatHeader(header map[string][]string) string { | ||||
| 	ml := 1 | ||||
| 	for k, _ := range header { | ||||
| @@ -411,16 +423,24 @@ func extractHeader(header map[string][]string) []string { | ||||
| // If the errors is not SevWarn or SevInfo it gets also logged (in short form, without stacktrace) onto stdout | ||||
| // Can be gloablly configured with ZeroLogErrTraces and ZeroLogAllTraces | ||||
| // Can be locally suppressed with Builder.NoLog() | ||||
| func (b *Builder) Build() error { | ||||
| func (b *Builder) Build(ctxs ...context.Context) error { | ||||
| 	warnOnPkgConfigNotInitialized() | ||||
|  | ||||
| 	for _, dctx := range ctxs { | ||||
| 		b.CtxData(MethodBuild, dctx) | ||||
| 	} | ||||
|  | ||||
| 	if pkgconfig.DisableErrorWrapping && b.wrappedErr != nil { | ||||
| 		return b.wrappedErr | ||||
| 	} | ||||
|  | ||||
| 	if pkgconfig.ZeroLogErrTraces && !b.noLog && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) { | ||||
| 		b.errorData.ShortLog(stackSkipLogger.Error()) | ||||
| 	} else if pkgconfig.ZeroLogAllTraces && !b.noLog { | ||||
| 		b.errorData.ShortLog(stackSkipLogger.Error()) | ||||
| 	} | ||||
|  | ||||
| 	b.CallListener(MethodBuild) | ||||
| 	b.errorData.CallListener(MethodBuild) | ||||
|  | ||||
| 	return b.errorData | ||||
| } | ||||
| @@ -434,27 +454,35 @@ func (b *Builder) Output(ctx context.Context, g *gin.Context) { | ||||
| 		b.GinReq(ctx, g, g.Request) | ||||
| 	} | ||||
|  | ||||
| 	b.CtxData(MethodOutput, ctx) | ||||
|  | ||||
| 	b.errorData.Output(g) | ||||
|  | ||||
| 	if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal { | ||||
| 	if (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) && (pkgconfig.ZeroLogErrGinOutput || pkgconfig.ZeroLogAllGinOutput) { | ||||
| 		b.errorData.Log(stackSkipLogger.Error()) | ||||
| 	} else if b.errorData.Severity == SevWarn { | ||||
| 	} else if (b.errorData.Severity == SevWarn) && (pkgconfig.ZeroLogAllGinOutput) { | ||||
| 		b.errorData.Log(stackSkipLogger.Warn()) | ||||
| 	} | ||||
|  | ||||
| 	b.CallListener(MethodOutput) | ||||
| 	b.errorData.CallListener(MethodOutput) | ||||
| } | ||||
|  | ||||
| // Print prints the error | ||||
| // If the error is SevErr we also send it to the error-service | ||||
| func (b *Builder) Print() { | ||||
| func (b *Builder) Print(ctxs ...context.Context) { | ||||
| 	warnOnPkgConfigNotInitialized() | ||||
|  | ||||
| 	for _, dctx := range ctxs { | ||||
| 		b.CtxData(MethodPrint, dctx) | ||||
| 	} | ||||
|  | ||||
| 	if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal { | ||||
| 		b.errorData.Log(stackSkipLogger.Error()) | ||||
| 	} else if b.errorData.Severity == SevWarn { | ||||
| 		b.errorData.ShortLog(stackSkipLogger.Warn()) | ||||
| 	} | ||||
|  | ||||
| 	b.CallListener(MethodPrint) | ||||
| 	b.errorData.CallListener(MethodPrint) | ||||
| } | ||||
|  | ||||
| func (b *Builder) Format(level LogPrintLevel) string { | ||||
| @@ -463,11 +491,16 @@ func (b *Builder) Format(level LogPrintLevel) string { | ||||
|  | ||||
| // Fatal prints the error and terminates the program | ||||
| // If the error is SevErr we also send it to the error-service | ||||
| func (b *Builder) Fatal() { | ||||
| func (b *Builder) Fatal(ctxs ...context.Context) { | ||||
|  | ||||
| 	for _, dctx := range ctxs { | ||||
| 		b.CtxData(MethodFatal, dctx) | ||||
| 	} | ||||
|  | ||||
| 	b.errorData.Severity = SevFatal | ||||
| 	b.errorData.Log(stackSkipLogger.WithLevel(zerolog.FatalLevel)) | ||||
|  | ||||
| 	b.CallListener(MethodFatal) | ||||
| 	b.errorData.CallListener(MethodFatal) | ||||
|  | ||||
| 	os.Exit(1) | ||||
| } | ||||
|   | ||||
| @@ -181,7 +181,7 @@ func getReflectedMetaValues(value interface{}, remainingDepth int) map[string]Me | ||||
|  | ||||
| 	jsonval, err := json.Marshal(value) | ||||
| 	if err != nil { | ||||
| 		panic(err) // gets recovered later up | ||||
| 		return map[string]MetaValue{"": {DataType: MDTString, Value: fmt.Sprintf("Failed to Marshal %T:\n%+v", value, value)}} | ||||
| 	} | ||||
|  | ||||
| 	return map[string]MetaValue{"": {DataType: MDTString, Value: string(jsonval)}} | ||||
|   | ||||
| @@ -50,6 +50,7 @@ var ( | ||||
|  | ||||
| 	TypeSQLQuery  = NewType("SQL_QUERY", langext.Ptr(500)) | ||||
| 	TypeSQLBuild  = NewType("SQL_BUILD", langext.Ptr(500)) | ||||
| 	TypeSQLDecode = NewType("SQL_DECODE", langext.Ptr(500)) | ||||
|  | ||||
| 	TypeWrap = NewType("Wrap", nil) | ||||
|  | ||||
| @@ -71,15 +72,18 @@ var ( | ||||
| 	// other values come from the downstream application that uses goext | ||||
| ) | ||||
|  | ||||
| var registeredTypes = dataext.SyncSet[string]{} | ||||
| var registeredTypes = dataext.SyncMap[string, ErrorType]{} | ||||
|  | ||||
| func NewType(key string, defStatusCode *int) ErrorType { | ||||
| 	insertOkay := registeredTypes.Add(key) | ||||
| 	if !insertOkay { | ||||
| 		panic("Cannot register same ErrType ('" + key + "') more than once") | ||||
| 	et := ErrorType{key, defStatusCode} | ||||
|  | ||||
| 	registeredTypes.Set(key, et) | ||||
|  | ||||
| 	return et | ||||
| } | ||||
|  | ||||
| 	return ErrorType{key, defStatusCode} | ||||
| func ListRegisteredTypes() []ErrorType { | ||||
| 	return registeredTypes.GetAllValues() | ||||
| } | ||||
|  | ||||
| type LogPrintLevel string | ||||
|   | ||||
| @@ -1,8 +1,11 @@ | ||||
| package exerr | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	"github.com/gin-gonic/gin" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"net/http" | ||||
| ) | ||||
|  | ||||
| type ErrorPackageConfig struct { | ||||
| @@ -13,6 +16,11 @@ type ErrorPackageConfig struct { | ||||
| 	IncludeMetaInGinOutput bool                                                                     // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output() | ||||
| 	ExtendGinOutput        func(err *ExErr, json map[string]any)                                    // (Optionally) extend the gin output with more fields | ||||
| 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any)                         // (Optionally) extend the gin `__data` output with more fields | ||||
| 	DisableErrorWrapping   bool                                                                     // Disables the exerr.Wrap()...Build() function - will always return the original error | ||||
| 	ZeroLogErrGinOutput    bool                                                                     // autom print zerolog logs on ginext.Error() / .Output(gin)  (for SevErr and SevFatal) | ||||
| 	ZeroLogAllGinOutput    bool                                                                     // autom print zerolog logs on ginext.Error() / .Output(gin)  (for all Severities) | ||||
| 	ExtendGinMeta          func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) // (Optionally) extend the final error meta values with additional data from the gin context (a few are automatically added, here more can be included) | ||||
| 	ExtendContextMeta      func(b *Builder, method Method, dctx context.Context)                    // (Optionally) extend the final error meta values with additional data from the context (a few are automatically added, here more can be included) | ||||
| } | ||||
|  | ||||
| type ErrorPackageConfigInit struct { | ||||
| @@ -23,6 +31,11 @@ type ErrorPackageConfigInit struct { | ||||
| 	IncludeMetaInGinOutput *bool | ||||
| 	ExtendGinOutput        func(err *ExErr, json map[string]any) | ||||
| 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) | ||||
| 	DisableErrorWrapping   *bool | ||||
| 	ZeroLogErrGinOutput    *bool | ||||
| 	ZeroLogAllGinOutput    *bool | ||||
| 	ExtendGinMeta          func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) | ||||
| 	ExtendContextMeta      func(b *Builder, method Method, dctx context.Context) | ||||
| } | ||||
|  | ||||
| var initialized = false | ||||
| @@ -35,6 +48,11 @@ var pkgconfig = ErrorPackageConfig{ | ||||
| 	IncludeMetaInGinOutput: true, | ||||
| 	ExtendGinOutput:        func(err *ExErr, json map[string]any) {}, | ||||
| 	ExtendGinDataOutput:    func(err *ExErr, depth int, json map[string]any) {}, | ||||
| 	DisableErrorWrapping:   false, | ||||
| 	ZeroLogErrGinOutput:    true, | ||||
| 	ZeroLogAllGinOutput:    false, | ||||
| 	ExtendGinMeta:          func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) {}, | ||||
| 	ExtendContextMeta:      func(b *Builder, method Method, dctx context.Context) {}, | ||||
| } | ||||
|  | ||||
| // Init initializes the exerr packages | ||||
| @@ -47,6 +65,8 @@ func Init(cfg ErrorPackageConfigInit) { | ||||
|  | ||||
| 	ego := func(err *ExErr, json map[string]any) {} | ||||
| 	egdo := func(err *ExErr, depth int, json map[string]any) {} | ||||
| 	egm := func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) {} | ||||
| 	egcm := func(b *Builder, method Method, dctx context.Context) {} | ||||
|  | ||||
| 	if cfg.ExtendGinOutput != nil { | ||||
| 		ego = cfg.ExtendGinOutput | ||||
| @@ -54,6 +74,12 @@ func Init(cfg ErrorPackageConfigInit) { | ||||
| 	if cfg.ExtendGinDataOutput != nil { | ||||
| 		egdo = cfg.ExtendGinDataOutput | ||||
| 	} | ||||
| 	if cfg.ExtendGinMeta != nil { | ||||
| 		egm = cfg.ExtendGinMeta | ||||
| 	} | ||||
| 	if cfg.ExtendContextMeta != nil { | ||||
| 		egcm = cfg.ExtendContextMeta | ||||
| 	} | ||||
|  | ||||
| 	pkgconfig = ErrorPackageConfig{ | ||||
| 		ZeroLogErrTraces:       langext.Coalesce(cfg.ZeroLogErrTraces, pkgconfig.ZeroLogErrTraces), | ||||
| @@ -63,6 +89,11 @@ func Init(cfg ErrorPackageConfigInit) { | ||||
| 		IncludeMetaInGinOutput: langext.Coalesce(cfg.IncludeMetaInGinOutput, pkgconfig.IncludeMetaInGinOutput), | ||||
| 		ExtendGinOutput:        ego, | ||||
| 		ExtendGinDataOutput:    egdo, | ||||
| 		DisableErrorWrapping:   langext.Coalesce(cfg.DisableErrorWrapping, pkgconfig.DisableErrorWrapping), | ||||
| 		ZeroLogAllGinOutput:    langext.Coalesce(cfg.ZeroLogAllGinOutput, pkgconfig.ZeroLogAllGinOutput), | ||||
| 		ZeroLogErrGinOutput:    langext.Coalesce(cfg.ZeroLogErrGinOutput, pkgconfig.ZeroLogErrGinOutput), | ||||
| 		ExtendGinMeta:          egm, | ||||
| 		ExtendContextMeta:      egcm, | ||||
| 	} | ||||
|  | ||||
| 	initialized = true | ||||
|   | ||||
| @@ -15,10 +15,10 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la | ||||
| 		ginJson["id"] = ee.UniqueID | ||||
| 	} | ||||
| 	if ee.Category != CatWrap { | ||||
| 		ginJson["category"] = ee.Category | ||||
| 		ginJson["category"] = ee.Category.Category | ||||
| 	} | ||||
| 	if ee.Type != TypeWrap { | ||||
| 		ginJson["type"] = ee.Type | ||||
| 		ginJson["type"] = ee.Type.Key | ||||
| 	} | ||||
| 	if ee.StatusCode != nil { | ||||
| 		ginJson["statuscode"] = ee.StatusCode | ||||
| @@ -30,7 +30,7 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la | ||||
| 		ginJson["caller"] = ee.Caller | ||||
| 	} | ||||
| 	if ee.Severity != SevErr { | ||||
| 		ginJson["severity"] = ee.Severity | ||||
| 		ginJson["severity"] = ee.Severity.Severity | ||||
| 	} | ||||
| 	if ee.Timestamp != (time.Time{}) { | ||||
| 		ginJson["time"] = ee.Timestamp.Format(time.RFC3339) | ||||
|   | ||||
| @@ -25,13 +25,11 @@ func RegisterListener(l Listener) { | ||||
| 	listener = append(listener, l) | ||||
| } | ||||
|  | ||||
| func (b *Builder) CallListener(m Method) { | ||||
| 	valErr := b.errorData | ||||
|  | ||||
| func (ee *ExErr) CallListener(m Method) { | ||||
| 	listenerLock.Lock() | ||||
| 	defer listenerLock.Unlock() | ||||
|  | ||||
| 	for _, v := range listener { | ||||
| 		v(m, valErr) | ||||
| 		v(m, ee) | ||||
| 	} | ||||
| } | ||||
|   | ||||
| @@ -9,6 +9,7 @@ import ( | ||||
| 	"go.mongodb.org/mongo-driver/bson" | ||||
| 	"go.mongodb.org/mongo-driver/bson/primitive" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"math" | ||||
| 	"strconv" | ||||
| 	"strings" | ||||
| 	"time" | ||||
| @@ -667,6 +668,28 @@ func (v MetaValue) rawValueForJson() any { | ||||
| 		} | ||||
| 		return v.Value.(EnumWrap).ValueString | ||||
| 	} | ||||
| 	if v.DataType == MDTFloat32 { | ||||
| 		if math.IsNaN(float64(v.Value.(float32))) { | ||||
| 			return "float64::NaN" | ||||
| 		} else if math.IsInf(float64(v.Value.(float32)), +1) { | ||||
| 			return "float64::+inf" | ||||
| 		} else if math.IsInf(float64(v.Value.(float32)), -1) { | ||||
| 			return "float64::-inf" | ||||
| 		} else { | ||||
| 			return v.Value | ||||
| 		} | ||||
| 	} | ||||
| 	if v.DataType == MDTFloat64 { | ||||
| 		if math.IsNaN(v.Value.(float64)) { | ||||
| 			return "float64::NaN" | ||||
| 		} else if math.IsInf(v.Value.(float64), +1) { | ||||
| 			return "float64::+inf" | ||||
| 		} else if math.IsInf(v.Value.(float64), -1) { | ||||
| 			return "float64::-inf" | ||||
| 		} else { | ||||
| 			return v.Value | ||||
| 		} | ||||
| 	} | ||||
| 	return v.Value | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -9,6 +9,7 @@ import ( | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||
| 	"net" | ||||
| 	"net/http" | ||||
| 	"net/http/httptest" | ||||
| 	"regexp" | ||||
| 	"strings" | ||||
| 	"time" | ||||
| @@ -16,8 +17,9 @@ import ( | ||||
|  | ||||
| type GinWrapper struct { | ||||
| 	engine          *gin.Engine | ||||
| 	SuppressGinLogs bool | ||||
| 	suppressGinLogs bool | ||||
|  | ||||
| 	opt                   Options | ||||
| 	allowCors             bool | ||||
| 	ginDebug              bool | ||||
| 	bufferBody            bool | ||||
| @@ -38,10 +40,13 @@ type ginRouteSpec struct { | ||||
| type Options struct { | ||||
| 	AllowCors                *bool                                     // Add cors handler to allow all CORS requests on the default http methods | ||||
| 	GinDebug                 *bool                                     // Set gin.debug to true (adds more logs) | ||||
| 	SuppressGinLogs          *bool                                     // Suppress our custom gin logs (even if GinDebug == true) | ||||
| 	BufferBody               *bool                                     // Buffers the input body stream, this way the ginext error handler can later include the whole request body | ||||
| 	Timeout                  *time.Duration                            // The default handler timeout | ||||
| 	ListenerBeforeRequest    []func(g *gin.Context)                    // Register listener that are called before the handler method | ||||
| 	ListenerAfterRequest     []func(g *gin.Context, resp HTTPResponse) // Register listener that are called after the handler method | ||||
| 	DebugTrimHandlerPrefixes []string                                  // Trim these prefixes from the handler names in the debug print | ||||
| 	DebugReplaceHandlerNames map[string]string                         // Replace handler names in debug output | ||||
| } | ||||
|  | ||||
| // NewEngine creates a new (wrapped) ginEngine | ||||
| @@ -50,7 +55,8 @@ func NewEngine(opt Options) *GinWrapper { | ||||
|  | ||||
| 	wrapper := &GinWrapper{ | ||||
| 		engine:                engine, | ||||
| 		SuppressGinLogs:       false, | ||||
| 		opt:                   opt, | ||||
| 		suppressGinLogs:       langext.Coalesce(opt.SuppressGinLogs, false), | ||||
| 		allowCors:             langext.Coalesce(opt.AllowCors, false), | ||||
| 		ginDebug:              langext.Coalesce(opt.GinDebug, true), | ||||
| 		bufferBody:            langext.Coalesce(opt.BufferBody, false), | ||||
| @@ -72,12 +78,12 @@ func NewEngine(opt Options) *GinWrapper { | ||||
| 	if !wrapper.ginDebug { | ||||
| 		gin.SetMode(gin.ReleaseMode) | ||||
|  | ||||
| 		if !wrapper.suppressGinLogs { | ||||
| 			ginlogger := gin.Logger() | ||||
| 			engine.Use(func(context *gin.Context) { | ||||
| 			if !wrapper.SuppressGinLogs { | ||||
| 				ginlogger(context) | ||||
| 			} | ||||
| 			}) | ||||
| 		} | ||||
| 	} else { | ||||
| 		gin.SetMode(gin.DebugMode) | ||||
| 	} | ||||
| @@ -183,5 +189,30 @@ func (w *GinWrapper) cleanMiddlewareName(fname string) string { | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	for _, pfx := range w.opt.DebugTrimHandlerPrefixes { | ||||
| 		if strings.HasPrefix(fname, pfx) { | ||||
| 			fname = fname[len(pfx):] | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	for k, v := range langext.ForceMap(w.opt.DebugReplaceHandlerNames) { | ||||
| 		if strings.EqualFold(fname, k) { | ||||
| 			fname = v | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return fname | ||||
| } | ||||
|  | ||||
| // ServeHTTP only used for unit tests | ||||
| func (w *GinWrapper) ServeHTTP(req *http.Request) *httptest.ResponseRecorder { | ||||
| 	respRec := httptest.NewRecorder() | ||||
| 	w.engine.ServeHTTP(respRec, req) | ||||
| 	return respRec | ||||
| } | ||||
|  | ||||
| // ForwardRequest manually inserts a request into this router | ||||
| // = behaves as if the request came from the outside (and writes the response to `writer`) | ||||
| func (w *GinWrapper) ForwardRequest(writer http.ResponseWriter, req *http.Request) { | ||||
| 	w.engine.ServeHTTP(writer, req) | ||||
| } | ||||
|   | ||||
| @@ -163,16 +163,16 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | ||||
|  | ||||
| 	ictx, cancel := context.WithTimeout(context.Background(), langext.Coalesce(pctx.timeout, pctx.wrapper.requestTimeout)) | ||||
|  | ||||
| 	actx := CreateAppContext(pctx.ginCtx, ictx, cancel) | ||||
|  | ||||
| 	if pctx.persistantData.sessionObj != nil { | ||||
| 		err := pctx.persistantData.sessionObj.Init(pctx.ginCtx, ictx) | ||||
| 		err := pctx.persistantData.sessionObj.Init(pctx.ginCtx, actx) | ||||
| 		if err != nil { | ||||
| 			cancel() | ||||
| 			actx.Cancel() | ||||
| 			return nil, nil, langext.Ptr(Error(exerr.Wrap(err, "Failed to init session").Build())) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	actx := CreateAppContext(pctx.ginCtx, ictx, cancel) | ||||
|  | ||||
| 	return actx, pctx.ginCtx, nil | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -1,6 +1,7 @@ | ||||
| package ginext | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	"github.com/gin-gonic/gin" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| @@ -9,6 +10,16 @@ import ( | ||||
| 	"os" | ||||
| ) | ||||
|  | ||||
| type cookieval struct { | ||||
| 	name     string | ||||
| 	value    string | ||||
| 	maxAge   int | ||||
| 	path     string | ||||
| 	domain   string | ||||
| 	secure   bool | ||||
| 	httpOnly bool | ||||
| } | ||||
|  | ||||
| type headerval struct { | ||||
| 	Key string | ||||
| 	Val string | ||||
| @@ -17,6 +28,7 @@ type headerval struct { | ||||
| type HTTPResponse interface { | ||||
| 	Write(g *gin.Context) | ||||
| 	WithHeader(k string, v string) HTTPResponse | ||||
| 	WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse | ||||
| 	IsSuccess() bool | ||||
| } | ||||
|  | ||||
| @@ -33,6 +45,7 @@ type jsonHTTPResponse struct { | ||||
| 	statusCode int | ||||
| 	data       any | ||||
| 	headers    []headerval | ||||
| 	cookies    []cookieval | ||||
| } | ||||
|  | ||||
| func (j jsonHTTPResponse) jsonRenderer(g *gin.Context) json.GoJsonRender { | ||||
| @@ -47,6 +60,9 @@ func (j jsonHTTPResponse) Write(g *gin.Context) { | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	g.Render(j.statusCode, j.jsonRenderer(g)) | ||||
| } | ||||
|  | ||||
| @@ -55,6 +71,11 @@ func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j jsonHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j jsonHTTPResponse) IsSuccess() bool { | ||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||
| } | ||||
| @@ -82,12 +103,16 @@ func (j jsonHTTPResponse) Headers() []string { | ||||
| type emptyHTTPResponse struct { | ||||
| 	statusCode int | ||||
| 	headers    []headerval | ||||
| 	cookies    []cookieval | ||||
| } | ||||
|  | ||||
| func (j emptyHTTPResponse) Write(g *gin.Context) { | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	g.Status(j.statusCode) | ||||
| } | ||||
|  | ||||
| @@ -96,6 +121,11 @@ func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j emptyHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j emptyHTTPResponse) IsSuccess() bool { | ||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||
| } | ||||
| @@ -120,12 +150,16 @@ type textHTTPResponse struct { | ||||
| 	statusCode int | ||||
| 	data       string | ||||
| 	headers    []headerval | ||||
| 	cookies    []cookieval | ||||
| } | ||||
|  | ||||
| func (j textHTTPResponse) Write(g *gin.Context) { | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	g.String(j.statusCode, "%s", j.data) | ||||
| } | ||||
|  | ||||
| @@ -134,6 +168,11 @@ func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j textHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j textHTTPResponse) IsSuccess() bool { | ||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||
| } | ||||
| @@ -159,12 +198,16 @@ type dataHTTPResponse struct { | ||||
| 	data        []byte | ||||
| 	contentType string | ||||
| 	headers     []headerval | ||||
| 	cookies     []cookieval | ||||
| } | ||||
|  | ||||
| func (j dataHTTPResponse) Write(g *gin.Context) { | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	g.Data(j.statusCode, j.contentType, j.data) | ||||
| } | ||||
|  | ||||
| @@ -173,6 +216,11 @@ func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j dataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j dataHTTPResponse) IsSuccess() bool { | ||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||
| } | ||||
| @@ -198,6 +246,7 @@ type fileHTTPResponse struct { | ||||
| 	filepath string | ||||
| 	filename *string | ||||
| 	headers  []headerval | ||||
| 	cookies  []cookieval | ||||
| } | ||||
|  | ||||
| func (j fileHTTPResponse) Write(g *gin.Context) { | ||||
| @@ -209,6 +258,9 @@ func (j fileHTTPResponse) Write(g *gin.Context) { | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	g.File(j.filepath) | ||||
| } | ||||
|  | ||||
| @@ -217,6 +269,11 @@ func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j fileHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j fileHTTPResponse) IsSuccess() bool { | ||||
| 	return true | ||||
| } | ||||
| @@ -247,17 +304,20 @@ type downloadDataHTTPResponse struct { | ||||
| 	data       []byte | ||||
| 	filename   *string | ||||
| 	headers    []headerval | ||||
| 	cookies    []cookieval | ||||
| } | ||||
|  | ||||
| func (j downloadDataHTTPResponse) Write(g *gin.Context) { | ||||
| 	g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later... | ||||
| 	if j.filename != nil { | ||||
| 		g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename)) | ||||
|  | ||||
| 	} | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	g.Data(j.statusCode, j.mimetype, j.data) | ||||
| } | ||||
|  | ||||
| @@ -266,6 +326,11 @@ func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j downloadDataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j downloadDataHTTPResponse) IsSuccess() bool { | ||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||
| } | ||||
| @@ -290,9 +355,16 @@ type redirectHTTPResponse struct { | ||||
| 	statusCode int | ||||
| 	url        string | ||||
| 	headers    []headerval | ||||
| 	cookies    []cookieval | ||||
| } | ||||
|  | ||||
| func (j redirectHTTPResponse) Write(g *gin.Context) { | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	g.Redirect(j.statusCode, j.url) | ||||
| } | ||||
|  | ||||
| @@ -301,6 +373,11 @@ func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j redirectHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j redirectHTTPResponse) IsSuccess() bool { | ||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||
| } | ||||
| @@ -324,10 +401,20 @@ func (j redirectHTTPResponse) Headers() []string { | ||||
| type jsonAPIErrResponse struct { | ||||
| 	err     *exerr.ExErr | ||||
| 	headers []headerval | ||||
| 	cookies []cookieval | ||||
| } | ||||
|  | ||||
| func (j jsonAPIErrResponse) Write(g *gin.Context) { | ||||
| 	j.err.Output(g) | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
|  | ||||
| 	exerr.Get(j.err).Output(context.Background(), g) | ||||
|  | ||||
| 	j.err.CallListener(exerr.MethodOutput) | ||||
| } | ||||
|  | ||||
| func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| @@ -335,6 +422,11 @@ func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j jsonAPIErrResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j jsonAPIErrResponse) IsSuccess() bool { | ||||
| 	return false | ||||
| } | ||||
|   | ||||
| @@ -53,15 +53,11 @@ func (w *GinRoutesWrapper) Group(relativePath string) *GinRoutesWrapper { | ||||
| func (w *GinRoutesWrapper) Use(middleware ...gin.HandlerFunc) *GinRoutesWrapper { | ||||
| 	defHandler := langext.ArrCopy(w.defaultHandler) | ||||
| 	defHandler = append(defHandler, middleware...) | ||||
| 	return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler} | ||||
| 	return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler, absPath: w.absPath} | ||||
| } | ||||
|  | ||||
| func (w *GinRoutesWrapper) WithJSONFilter(filter string) *GinRoutesWrapper { | ||||
| 	defHandler := langext.ArrCopy(w.defaultHandler) | ||||
| 	defHandler = append(defHandler, func(g *gin.Context) { | ||||
| 		g.Set("goext.jsonfilter", filter) | ||||
| 	}) | ||||
| 	return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler} | ||||
| 	return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) }) | ||||
| } | ||||
|  | ||||
| func (w *GinRoutesWrapper) GET(relativePath string) *GinRouteBuilder { | ||||
| @@ -116,10 +112,7 @@ func (w *GinRouteBuilder) Use(middleware ...gin.HandlerFunc) *GinRouteBuilder { | ||||
| } | ||||
|  | ||||
| func (w *GinRouteBuilder) WithJSONFilter(filter string) *GinRouteBuilder { | ||||
| 	w.handlers = append(w.handlers, func(g *gin.Context) { | ||||
| 		g.Set("goext.jsonfilter", filter) | ||||
| 	}) | ||||
| 	return w | ||||
| 	return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) }) | ||||
| } | ||||
|  | ||||
| func (w *GinRouteBuilder) Handle(handler WHandlerFunc) { | ||||
|   | ||||
| @@ -6,6 +6,6 @@ import ( | ||||
| ) | ||||
|  | ||||
| type SessionObject interface { | ||||
| 	Init(g *gin.Context, ctx context.Context) error | ||||
| 	Init(g *gin.Context, ctx *AppContext) error | ||||
| 	Finish(ctx context.Context, resp HTTPResponse) error | ||||
| } | ||||
|   | ||||
							
								
								
									
										46
									
								
								go.mod
									
									
									
									
									
								
							
							
						
						
									
										46
									
								
								go.mod
									
									
									
									
									
								
							| @@ -1,54 +1,58 @@ | ||||
| module gogs.mikescher.com/BlackForestBytes/goext | ||||
|  | ||||
| go 1.21 | ||||
| go 1.22 | ||||
|  | ||||
| require ( | ||||
| 	github.com/gin-gonic/gin v1.9.1 | ||||
| 	github.com/jmoiron/sqlx v1.3.5 | ||||
| 	github.com/rs/xid v1.5.0 | ||||
| 	github.com/rs/zerolog v1.31.0 | ||||
| 	go.mongodb.org/mongo-driver v1.13.1 | ||||
| 	golang.org/x/crypto v0.17.0 | ||||
| 	golang.org/x/sys v0.16.0 | ||||
| 	golang.org/x/term v0.16.0 | ||||
| 	github.com/gin-gonic/gin v1.10.0 | ||||
| 	github.com/glebarez/go-sqlite v1.22.0 // only needed for tests -.- | ||||
| 	github.com/jmoiron/sqlx v1.4.0 | ||||
| 	github.com/rs/xid v1.5.0 | ||||
| 	github.com/rs/zerolog v1.32.0 | ||||
| 	go.mongodb.org/mongo-driver v1.15.0 | ||||
| 	golang.org/x/crypto v0.23.0 | ||||
| 	golang.org/x/sys v0.20.0 | ||||
| 	golang.org/x/term v0.20.0 | ||||
| ) | ||||
|  | ||||
| require golang.org/x/sync v0.7.0 | ||||
|  | ||||
| require ( | ||||
| 	github.com/bytedance/sonic v1.10.2 // indirect | ||||
| 	github.com/bytedance/sonic v1.11.6 // indirect | ||||
| 	github.com/bytedance/sonic/loader v0.1.1 // indirect | ||||
| 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect | ||||
| 	github.com/chenzhuoyu/iasm v0.9.1 // indirect | ||||
| 	github.com/cloudwego/base64x v0.1.4 // indirect | ||||
| 	github.com/cloudwego/iasm v0.2.0 // indirect | ||||
| 	github.com/dustin/go-humanize v1.0.1 // indirect | ||||
| 	github.com/gabriel-vasile/mimetype v1.4.3 // indirect | ||||
| 	github.com/gin-contrib/sse v0.1.0 // indirect | ||||
| 	github.com/go-playground/locales v0.14.1 // indirect | ||||
| 	github.com/go-playground/universal-translator v0.18.1 // indirect | ||||
| 	github.com/go-playground/validator/v10 v10.16.0 // indirect | ||||
| 	github.com/go-playground/validator/v10 v10.20.0 // indirect | ||||
| 	github.com/goccy/go-json v0.10.2 // indirect | ||||
| 	github.com/golang/snappy v0.0.4 // indirect | ||||
| 	github.com/google/uuid v1.5.0 // indirect | ||||
| 	github.com/json-iterator/go v1.1.12 // indirect | ||||
| 	github.com/klauspost/compress v1.17.4 // indirect | ||||
| 	github.com/klauspost/cpuid/v2 v2.2.6 // indirect | ||||
| 	github.com/leodido/go-urn v1.2.4 // indirect | ||||
| 	github.com/klauspost/compress v1.17.8 // indirect | ||||
| 	github.com/klauspost/cpuid/v2 v2.2.7 // indirect | ||||
| 	github.com/leodido/go-urn v1.4.0 // indirect | ||||
| 	github.com/mattn/go-colorable v0.1.13 // indirect | ||||
| 	github.com/mattn/go-isatty v0.0.20 // indirect | ||||
| 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect | ||||
| 	github.com/modern-go/reflect2 v1.0.2 // indirect | ||||
| 	github.com/montanaflynn/stats v0.7.1 // indirect | ||||
| 	github.com/pelletier/go-toml/v2 v2.1.1 // indirect | ||||
| 	github.com/pelletier/go-toml/v2 v2.2.2 // indirect | ||||
| 	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect | ||||
| 	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect | ||||
| 	github.com/ugorji/go/codec v1.2.12 // indirect | ||||
| 	github.com/xdg-go/pbkdf2 v1.0.0 // indirect | ||||
| 	github.com/xdg-go/scram v1.1.2 // indirect | ||||
| 	github.com/xdg-go/stringprep v1.0.4 // indirect | ||||
| 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect | ||||
| 	golang.org/x/arch v0.7.0 // indirect | ||||
| 	golang.org/x/net v0.19.0 // indirect | ||||
| 	golang.org/x/sync v0.6.0 // indirect | ||||
| 	golang.org/x/text v0.14.0 // indirect | ||||
| 	google.golang.org/protobuf v1.32.0 // indirect | ||||
| 	github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76 // indirect | ||||
| 	golang.org/x/arch v0.8.0 // indirect | ||||
| 	golang.org/x/net v0.25.0 // indirect | ||||
| 	golang.org/x/text v0.15.0 // indirect | ||||
| 	google.golang.org/protobuf v1.34.1 // indirect | ||||
| 	gopkg.in/yaml.v3 v3.0.1 // indirect | ||||
| 	modernc.org/libc v1.37.6 // indirect | ||||
| 	modernc.org/mathutil v1.6.0 // indirect | ||||
|   | ||||
							
								
								
									
										123
									
								
								go.sum
									
									
									
									
									
								
							
							
						
						
									
										123
									
								
								go.sum
									
									
									
									
									
								
							| @@ -1,7 +1,25 @@ | ||||
| filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= | ||||
| github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= | ||||
| github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= | ||||
| github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE= | ||||
| github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||
| github.com/bytedance/sonic v1.11.0 h1:FwNNv6Vu4z2Onf1++LNzxB/QhitD8wuTdpZzMTGITWo= | ||||
| github.com/bytedance/sonic v1.11.0/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||
| github.com/bytedance/sonic v1.11.1 h1:JC0+6c9FoWYYxakaoa+c5QTtJeiSZNeByOBhXtAFSn4= | ||||
| github.com/bytedance/sonic v1.11.1/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||
| github.com/bytedance/sonic v1.11.2 h1:ywfwo0a/3j9HR8wsYGWsIWl2mvRsI950HyoxiBERw5A= | ||||
| github.com/bytedance/sonic v1.11.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||
| github.com/bytedance/sonic v1.11.3 h1:jRN+yEjakWh8aK5FzrciUHG8OFXK+4/KrAX/ysEtHAA= | ||||
| github.com/bytedance/sonic v1.11.3/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||
| github.com/bytedance/sonic v1.11.4 h1:8+OMLSSDDm2/qJc6ld5K5Sm62NK9VHcUKk0NzBoMAM4= | ||||
| github.com/bytedance/sonic v1.11.4/go.mod h1:YrWEqYtlBPS6LUA0vpuG79a1trsh4Ae41uWUWUreHhE= | ||||
| github.com/bytedance/sonic v1.11.5 h1:G00FYjjqll5iQ1PYXynbg/hyzqBqavH8Mo9/oTopd9k= | ||||
| github.com/bytedance/sonic v1.11.5/go.mod h1:X2PC2giUdj/Cv2lliWFLk6c/DUQok5rViJSemeB0wDw= | ||||
| github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0= | ||||
| github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= | ||||
| github.com/bytedance/sonic/loader v0.1.0/go.mod h1:UmRT+IRTGKz/DAkzcEGzyVqQFJ7H9BqwBO3pm9H/+HY= | ||||
| github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM= | ||||
| github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= | ||||
| github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= | ||||
| github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= | ||||
| github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= | ||||
| @@ -9,6 +27,17 @@ github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpV | ||||
| github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | ||||
| github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0= | ||||
| github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | ||||
| github.com/cloudwego/base64x v0.1.0 h1:Tg5q9tq1khq9Y9UwfoC6zkHK0FypN2GLDvhqFceOL8U= | ||||
| github.com/cloudwego/base64x v0.1.0/go.mod h1:lM8nFiNbg74QgesNo6EAtv8N9tlRjBWExmHoNDa3PkU= | ||||
| github.com/cloudwego/base64x v0.1.3 h1:b5J/l8xolB7dyDTTmhJP2oTs5LdrjyrUFuNxdfq5hAg= | ||||
| github.com/cloudwego/base64x v0.1.3/go.mod h1:1+1K5BUHIQzyapgpF7LwvOGAEDicKtt1umPV+aN8pi8= | ||||
| github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y= | ||||
| github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= | ||||
| github.com/cloudwego/iasm v0.0.9/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= | ||||
| github.com/cloudwego/iasm v0.1.0 h1:q0OuhwWDMyi3nlrQ6kIr0Yx0c3FI6cq/OZWKodIDdz8= | ||||
| github.com/cloudwego/iasm v0.1.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= | ||||
| github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg= | ||||
| github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= | ||||
| github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= | ||||
| github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | ||||
| github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= | ||||
| @@ -21,6 +50,8 @@ github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE | ||||
| github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= | ||||
| github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= | ||||
| github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= | ||||
| github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU= | ||||
| github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= | ||||
| github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ= | ||||
| github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc= | ||||
| github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= | ||||
| @@ -31,11 +62,22 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn | ||||
| github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= | ||||
| github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE= | ||||
| github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||
| github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74= | ||||
| github.com/go-playground/validator/v10 v10.17.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||
| github.com/go-playground/validator/v10 v10.18.0 h1:BvolUXjp4zuvkZ5YN5t7ebzbhlUtPsPm2S9NAZ5nl9U= | ||||
| github.com/go-playground/validator/v10 v10.18.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||
| github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4= | ||||
| github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||
| github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8= | ||||
| github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||
| github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | ||||
| github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | ||||
| github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= | ||||
| github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= | ||||
| github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | ||||
| github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= | ||||
| github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= | ||||
| github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= | ||||
| github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | ||||
| github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= | ||||
| github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | ||||
| @@ -49,19 +91,32 @@ github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= | ||||
| github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= | ||||
| github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= | ||||
| github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= | ||||
| github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= | ||||
| github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= | ||||
| github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= | ||||
| github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= | ||||
| github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= | ||||
| github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= | ||||
| github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | ||||
| github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI= | ||||
| github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | ||||
| github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= | ||||
| github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= | ||||
| github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU= | ||||
| github.com/klauspost/compress v1.17.8/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= | ||||
| github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | ||||
| github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= | ||||
| github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||
| github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM= | ||||
| github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||
| github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= | ||||
| github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= | ||||
| github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= | ||||
| github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= | ||||
| github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= | ||||
| github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0= | ||||
| github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= | ||||
| github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= | ||||
| github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= | ||||
| github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= | ||||
| github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= | ||||
| @@ -71,6 +126,7 @@ github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D | ||||
| github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= | ||||
| github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI= | ||||
| github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= | ||||
| github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= | ||||
| github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | ||||
| github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= | ||||
| github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | ||||
| @@ -81,6 +137,12 @@ github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8 | ||||
| github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= | ||||
| github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI= | ||||
| github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= | ||||
| github.com/pelletier/go-toml/v2 v2.2.0 h1:QLgLl2yMN7N+ruc31VynXs1vhMZa7CeHHejIeBAsoHo= | ||||
| github.com/pelletier/go-toml/v2 v2.2.0/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= | ||||
| github.com/pelletier/go-toml/v2 v2.2.1 h1:9TA9+T8+8CUCO2+WYnDLCgrYi9+omqKXyjDtosvtEhg= | ||||
| github.com/pelletier/go-toml/v2 v2.2.1/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= | ||||
| github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM= | ||||
| github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= | ||||
| github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= | ||||
| github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= | ||||
| github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= | ||||
| @@ -90,9 +152,12 @@ github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= | ||||
| github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= | ||||
| github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A= | ||||
| github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= | ||||
| github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0= | ||||
| github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= | ||||
| github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= | ||||
| github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= | ||||
| github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= | ||||
| github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= | ||||
| github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= | ||||
| github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= | ||||
| github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= | ||||
| @@ -101,6 +166,8 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o | ||||
| github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= | ||||
| github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= | ||||
| github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= | ||||
| github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= | ||||
| github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= | ||||
| github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= | ||||
| github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= | ||||
| github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= | ||||
| @@ -114,18 +181,38 @@ github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gi | ||||
| github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= | ||||
| github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk= | ||||
| github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4= | ||||
| github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76 h1:tBiBTKHnIjovYoLX/TPkcf+OjqqKGQrPtGT3Foz+Pgo= | ||||
| github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76/go.mod h1:SQliXeA7Dhkt//vS29v3zpbEwoa+zb2Cn5xj5uO4K5U= | ||||
| github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | ||||
| go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk= | ||||
| go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo= | ||||
| go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= | ||||
| go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= | ||||
| go.mongodb.org/mongo-driver v1.15.0 h1:rJCKC8eEliewXjZGf0ddURtl7tTVy1TK3bfl0gkUSLc= | ||||
| go.mongodb.org/mongo-driver v1.15.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= | ||||
| golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | ||||
| golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc= | ||||
| golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||
| golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc= | ||||
| golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||
| golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | ||||
| golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | ||||
| golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= | ||||
| golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= | ||||
| golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= | ||||
| golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= | ||||
| golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= | ||||
| golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= | ||||
| golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= | ||||
| golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= | ||||
| golang.org/x/crypto v0.20.0 h1:jmAMJJZXr5KiCw05dfYK9QnqaqKLYXijU23lsEdcQqg= | ||||
| golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ= | ||||
| golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= | ||||
| golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= | ||||
| golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= | ||||
| golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= | ||||
| golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI= | ||||
| golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= | ||||
| golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | ||||
| golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | ||||
| golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | ||||
| @@ -134,10 +221,22 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx | ||||
| golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= | ||||
| golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= | ||||
| golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= | ||||
| golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= | ||||
| golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= | ||||
| golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= | ||||
| golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= | ||||
| golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc= | ||||
| golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= | ||||
| golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w= | ||||
| golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8= | ||||
| golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= | ||||
| golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= | ||||
| golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||
| golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||
| golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= | ||||
| golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | ||||
| golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= | ||||
| golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | ||||
| golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | ||||
| golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||
| golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||
| @@ -151,10 +250,26 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||
| golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||
| golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= | ||||
| golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= | ||||
| golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= | ||||
| golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= | ||||
| golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y= | ||||
| golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | ||||
| golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | ||||
| golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE= | ||||
| golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= | ||||
| golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= | ||||
| golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= | ||||
| golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8= | ||||
| golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= | ||||
| golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q= | ||||
| golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= | ||||
| golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw= | ||||
| golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= | ||||
| golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | ||||
| golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||
| golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||
| @@ -163,6 +278,8 @@ golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= | ||||
| golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= | ||||
| golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= | ||||
| golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= | ||||
| golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk= | ||||
| golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= | ||||
| golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= | ||||
| golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | ||||
| golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= | ||||
| @@ -172,6 +289,12 @@ golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSm | ||||
| golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= | ||||
| google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= | ||||
| google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||
| google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= | ||||
| google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||
| google.golang.org/protobuf v1.34.0 h1:Qo/qEd2RZPCf2nKuorzksSknv0d3ERwp1vFG38gSmH4= | ||||
| google.golang.org/protobuf v1.34.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||
| google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg= | ||||
| google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||
| gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= | ||||
| gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | ||||
| gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| package goext | ||||
|  | ||||
| const GoextVersion = "0.0.360" | ||||
| const GoextVersion = "0.0.449" | ||||
|  | ||||
| const GoextVersionTimestamp = "2024-01-06T01:31:07+0100" | ||||
| const GoextVersionTimestamp = "2024-05-12T16:51:52+0200" | ||||
|   | ||||
| @@ -217,6 +217,7 @@ type decodeState struct { | ||||
| 	savedError            error | ||||
| 	useNumber             bool | ||||
| 	disallowUnknownFields bool | ||||
| 	tagkey                *string | ||||
| } | ||||
|  | ||||
| // readIndex returns the position of the last byte read. | ||||
| @@ -652,7 +653,11 @@ func (d *decodeState) object(v reflect.Value) error { | ||||
| 			v.Set(reflect.MakeMap(t)) | ||||
| 		} | ||||
| 	case reflect.Struct: | ||||
| 		fields = cachedTypeFields(t) | ||||
| 		tagkey := "json" | ||||
| 		if d.tagkey != nil { | ||||
| 			tagkey = *d.tagkey | ||||
| 		} | ||||
| 		fields = cachedTypeFields(t, tagkey) | ||||
| 		// ok | ||||
| 	default: | ||||
| 		d.saveError(&UnmarshalTypeError{Value: "object", Type: t, Offset: int64(d.off)}) | ||||
|   | ||||
| @@ -382,7 +382,12 @@ func isEmptyValue(v reflect.Value) bool { | ||||
| } | ||||
|  | ||||
| func (e *encodeState) reflectValue(v reflect.Value, opts encOpts) { | ||||
| 	valueEncoder(v)(e, v, opts) | ||||
| 	tagkey := "json" | ||||
| 	if opts.tagkey != nil { | ||||
| 		tagkey = *opts.tagkey | ||||
| 	} | ||||
|  | ||||
| 	valueEncoder(v, tagkey)(e, v, opts) | ||||
| } | ||||
|  | ||||
| type encOpts struct { | ||||
| @@ -397,20 +402,22 @@ type encOpts struct { | ||||
| 	// filter matches jsonfilter tag of struct | ||||
| 	// marshals if no jsonfilter is set or otherwise if jsonfilter has the filter value | ||||
| 	filter *string | ||||
| 	// use different tag instead of "json" | ||||
| 	tagkey *string | ||||
| } | ||||
|  | ||||
| type encoderFunc func(e *encodeState, v reflect.Value, opts encOpts) | ||||
|  | ||||
| var encoderCache sync.Map // map[reflect.Type]encoderFunc | ||||
|  | ||||
| func valueEncoder(v reflect.Value) encoderFunc { | ||||
| func valueEncoder(v reflect.Value, tagkey string) encoderFunc { | ||||
| 	if !v.IsValid() { | ||||
| 		return invalidValueEncoder | ||||
| 	} | ||||
| 	return typeEncoder(v.Type()) | ||||
| 	return typeEncoder(v.Type(), tagkey) | ||||
| } | ||||
|  | ||||
| func typeEncoder(t reflect.Type) encoderFunc { | ||||
| func typeEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||
| 	if fi, ok := encoderCache.Load(t); ok { | ||||
| 		return fi.(encoderFunc) | ||||
| 	} | ||||
| @@ -433,7 +440,7 @@ func typeEncoder(t reflect.Type) encoderFunc { | ||||
| 	} | ||||
|  | ||||
| 	// Compute the real encoder and replace the indirect func with it. | ||||
| 	f = newTypeEncoder(t, true) | ||||
| 	f = newTypeEncoder(t, true, tagkey) | ||||
| 	wg.Done() | ||||
| 	encoderCache.Store(t, f) | ||||
| 	return f | ||||
| @@ -446,19 +453,19 @@ var ( | ||||
|  | ||||
| // newTypeEncoder constructs an encoderFunc for a type. | ||||
| // The returned encoder only checks CanAddr when allowAddr is true. | ||||
| func newTypeEncoder(t reflect.Type, allowAddr bool) encoderFunc { | ||||
| func newTypeEncoder(t reflect.Type, allowAddr bool, tagkey string) encoderFunc { | ||||
| 	// If we have a non-pointer value whose type implements | ||||
| 	// Marshaler with a value receiver, then we're better off taking | ||||
| 	// the address of the value - otherwise we end up with an | ||||
| 	// allocation as we cast the value to an interface. | ||||
| 	if t.Kind() != reflect.Pointer && allowAddr && reflect.PointerTo(t).Implements(marshalerType) { | ||||
| 		return newCondAddrEncoder(addrMarshalerEncoder, newTypeEncoder(t, false)) | ||||
| 		return newCondAddrEncoder(addrMarshalerEncoder, newTypeEncoder(t, false, tagkey)) | ||||
| 	} | ||||
| 	if t.Implements(marshalerType) { | ||||
| 		return marshalerEncoder | ||||
| 	} | ||||
| 	if t.Kind() != reflect.Pointer && allowAddr && reflect.PointerTo(t).Implements(textMarshalerType) { | ||||
| 		return newCondAddrEncoder(addrTextMarshalerEncoder, newTypeEncoder(t, false)) | ||||
| 		return newCondAddrEncoder(addrTextMarshalerEncoder, newTypeEncoder(t, false, tagkey)) | ||||
| 	} | ||||
| 	if t.Implements(textMarshalerType) { | ||||
| 		return textMarshalerEncoder | ||||
| @@ -480,15 +487,15 @@ func newTypeEncoder(t reflect.Type, allowAddr bool) encoderFunc { | ||||
| 	case reflect.Interface: | ||||
| 		return interfaceEncoder | ||||
| 	case reflect.Struct: | ||||
| 		return newStructEncoder(t) | ||||
| 		return newStructEncoder(t, tagkey) | ||||
| 	case reflect.Map: | ||||
| 		return newMapEncoder(t) | ||||
| 		return newMapEncoder(t, tagkey) | ||||
| 	case reflect.Slice: | ||||
| 		return newSliceEncoder(t) | ||||
| 		return newSliceEncoder(t, tagkey) | ||||
| 	case reflect.Array: | ||||
| 		return newArrayEncoder(t) | ||||
| 		return newArrayEncoder(t, tagkey) | ||||
| 	case reflect.Pointer: | ||||
| 		return newPtrEncoder(t) | ||||
| 		return newPtrEncoder(t, tagkey) | ||||
| 	default: | ||||
| 		return unsupportedTypeEncoder | ||||
| 	} | ||||
| @@ -801,8 +808,8 @@ FieldLoop: | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func newStructEncoder(t reflect.Type) encoderFunc { | ||||
| 	se := structEncoder{fields: cachedTypeFields(t)} | ||||
| func newStructEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||
| 	se := structEncoder{fields: cachedTypeFields(t, tagkey)} | ||||
| 	return se.encode | ||||
| } | ||||
|  | ||||
| @@ -855,7 +862,7 @@ func (me mapEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 	e.ptrLevel-- | ||||
| } | ||||
|  | ||||
| func newMapEncoder(t reflect.Type) encoderFunc { | ||||
| func newMapEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||
| 	switch t.Key().Kind() { | ||||
| 	case reflect.String, | ||||
| 		reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, | ||||
| @@ -865,7 +872,7 @@ func newMapEncoder(t reflect.Type) encoderFunc { | ||||
| 			return unsupportedTypeEncoder | ||||
| 		} | ||||
| 	} | ||||
| 	me := mapEncoder{typeEncoder(t.Elem())} | ||||
| 	me := mapEncoder{typeEncoder(t.Elem(), tagkey)} | ||||
| 	return me.encode | ||||
| } | ||||
|  | ||||
| @@ -936,7 +943,7 @@ func (se sliceEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 	e.ptrLevel-- | ||||
| } | ||||
|  | ||||
| func newSliceEncoder(t reflect.Type) encoderFunc { | ||||
| func newSliceEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||
| 	// Byte slices get special treatment; arrays don't. | ||||
| 	if t.Elem().Kind() == reflect.Uint8 { | ||||
| 		p := reflect.PointerTo(t.Elem()) | ||||
| @@ -944,7 +951,7 @@ func newSliceEncoder(t reflect.Type) encoderFunc { | ||||
| 			return encodeByteSlice | ||||
| 		} | ||||
| 	} | ||||
| 	enc := sliceEncoder{newArrayEncoder(t)} | ||||
| 	enc := sliceEncoder{newArrayEncoder(t, tagkey)} | ||||
| 	return enc.encode | ||||
| } | ||||
|  | ||||
| @@ -964,8 +971,8 @@ func (ae arrayEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 	e.WriteByte(']') | ||||
| } | ||||
|  | ||||
| func newArrayEncoder(t reflect.Type) encoderFunc { | ||||
| 	enc := arrayEncoder{typeEncoder(t.Elem())} | ||||
| func newArrayEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||
| 	enc := arrayEncoder{typeEncoder(t.Elem(), tagkey)} | ||||
| 	return enc.encode | ||||
| } | ||||
|  | ||||
| @@ -992,8 +999,8 @@ func (pe ptrEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 	e.ptrLevel-- | ||||
| } | ||||
|  | ||||
| func newPtrEncoder(t reflect.Type) encoderFunc { | ||||
| 	enc := ptrEncoder{typeEncoder(t.Elem())} | ||||
| func newPtrEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||
| 	enc := ptrEncoder{typeEncoder(t.Elem(), tagkey)} | ||||
| 	return enc.encode | ||||
| } | ||||
|  | ||||
| @@ -1270,7 +1277,7 @@ func (x byIndex) Less(i, j int) bool { | ||||
| // typeFields returns a list of fields that JSON should recognize for the given type. | ||||
| // The algorithm is breadth-first search over the set of structs to include - the top struct | ||||
| // and then any reachable anonymous structs. | ||||
| func typeFields(t reflect.Type) structFields { | ||||
| func typeFields(t reflect.Type, tagkey string) structFields { | ||||
| 	// Anonymous fields to explore at the current level and the next. | ||||
| 	current := []field{} | ||||
| 	next := []field{{typ: t}} | ||||
| @@ -1315,7 +1322,7 @@ func typeFields(t reflect.Type) structFields { | ||||
| 					// Ignore unexported non-embedded fields. | ||||
| 					continue | ||||
| 				} | ||||
| 				tag := sf.Tag.Get("json") | ||||
| 				tag := sf.Tag.Get(tagkey) | ||||
| 				if tag == "-" { | ||||
| 					continue | ||||
| 				} | ||||
| @@ -1449,7 +1456,7 @@ func typeFields(t reflect.Type) structFields { | ||||
|  | ||||
| 	for i := range fields { | ||||
| 		f := &fields[i] | ||||
| 		f.encoder = typeEncoder(typeByIndex(t, f.index)) | ||||
| 		f.encoder = typeEncoder(typeByIndex(t, f.index), tagkey) | ||||
| 	} | ||||
| 	nameIndex := make(map[string]int, len(fields)) | ||||
| 	for i, field := range fields { | ||||
| @@ -1474,13 +1481,26 @@ func dominantField(fields []field) (field, bool) { | ||||
| 	return fields[0], true | ||||
| } | ||||
|  | ||||
| var fieldCache sync.Map // map[reflect.Type]structFields | ||||
| var fieldCache sync.Map // map[string]map[reflect.Type]structFields | ||||
|  | ||||
| // cachedTypeFields is like typeFields but uses a cache to avoid repeated work. | ||||
| func cachedTypeFields(t reflect.Type) structFields { | ||||
| 	if f, ok := fieldCache.Load(t); ok { | ||||
| func cachedTypeFields(t reflect.Type, tagkey string) structFields { | ||||
| 	if m0, ok := fieldCache.Load(tagkey); ok { | ||||
|  | ||||
| 		if f, ok := m0.(*sync.Map).Load(t); ok { | ||||
| 			return f.(structFields) | ||||
| 		} | ||||
| 	f, _ := fieldCache.LoadOrStore(t, typeFields(t)) | ||||
| 		f, _ := m0.(*sync.Map).LoadOrStore(t, typeFields(t, tagkey)) | ||||
| 		return f.(structFields) | ||||
|  | ||||
| 	} else { | ||||
|  | ||||
| 		m0 := &sync.Map{} | ||||
| 		f, _ := m0.LoadOrStore(t, typeFields(t, tagkey)) | ||||
|  | ||||
| 		fieldCache.Store(tagkey, m0) | ||||
|  | ||||
| 		return f.(structFields) | ||||
| 	} | ||||
|  | ||||
| } | ||||
|   | ||||
| @@ -41,6 +41,9 @@ func (dec *Decoder) UseNumber() { dec.d.useNumber = true } | ||||
| // non-ignored, exported fields in the destination. | ||||
| func (dec *Decoder) DisallowUnknownFields() { dec.d.disallowUnknownFields = true } | ||||
|  | ||||
// TagKey sets a different TagKey (instead of "json").
// Struct-field names are then resolved via sf.Tag.Get(tagkey) during decoding,
// so e.g. TagKey("bson") reads `bson:"..."` tags instead of `json:"..."`.
func (dec *Decoder) TagKey(v string) { dec.d.tagkey = &v }
|  | ||||
| // Decode reads the next JSON-encoded value from its | ||||
| // input and stores it in the value pointed to by v. | ||||
| // | ||||
|   | ||||
| @@ -265,6 +265,15 @@ func ArrFirstIndex[T comparable](arr []T, needle T) int { | ||||
| 	return -1 | ||||
| } | ||||
|  | ||||
// ArrFirstIndexFunc returns the index of the first element in arr for which
// comp returns true, or -1 when no element matches.
func ArrFirstIndexFunc[T any](arr []T, comp func(v T) bool) int {
	for idx := range arr {
		if comp(arr[idx]) {
			return idx
		}
	}
	return -1
}
|  | ||||
| func ArrLastIndex[T comparable](arr []T, needle T) int { | ||||
| 	result := -1 | ||||
| 	for i, v := range arr { | ||||
| @@ -275,6 +284,16 @@ func ArrLastIndex[T comparable](arr []T, needle T) int { | ||||
| 	return result | ||||
| } | ||||
|  | ||||
// ArrLastIndexFunc returns the index of the last element in arr for which
// comp returns true, or -1 when no element matches.
func ArrLastIndexFunc[T any](arr []T, comp func(v T) bool) int {
	// Scan from the end so the first hit is the last overall match; this
	// avoids always traversing the whole slice as the forward version did.
	for i := len(arr) - 1; i >= 0; i-- {
		if comp(arr[i]) {
			return i
		}
	}
	return -1
}
|  | ||||
| func AddToSet[T comparable](set []T, add T) []T { | ||||
| 	for _, v := range set { | ||||
| 		if v == add { | ||||
| @@ -434,6 +453,15 @@ func ArrConcat[T any](arr ...[]T) []T { | ||||
| 	return r | ||||
| } | ||||
|  | ||||
// ArrAppend works similar to append(x, y, z) - but does not touch the old
// array and always returns a freshly allocated one.
func ArrAppend[T any](arr []T, add ...T) []T {
	// Pre-size to the exact final length; the old ArrCopy+append-in-a-loop
	// version allocated once for the copy and again when append outgrew it.
	r := make([]T, 0, len(arr)+len(add))
	r = append(r, arr...)
	r = append(r, add...)
	return r
}
|  | ||||
| // ArrCopy does a shallow copy of the 'in' array | ||||
| func ArrCopy[T any](in []T) []T { | ||||
| 	out := make([]T, len(in)) | ||||
| @@ -449,6 +477,10 @@ func ArrRemove[T comparable](arr []T, needle T) []T { | ||||
| 	return arr | ||||
| } | ||||
|  | ||||
// ArrRemoveAt returns a copy of arr with the element at index idx removed.
// Unlike the naive append(arr[:idx], arr[idx+1:]...) — which shifts elements
// inside the input's backing array and thereby mutates the caller's slice —
// this allocates a fresh slice, matching the copy semantics of ArrAppend.
// Panics if idx is out of range, like a direct slice access would.
func ArrRemoveAt[T any](arr []T, idx int) []T {
	out := make([]T, 0, len(arr))
	out = append(out, arr[:idx]...)
	out = append(out, arr[idx+1:]...)
	return out
}
|  | ||||
| func ArrExcept[T comparable](arr []T, needles ...T) []T { | ||||
| 	r := make([]T, 0, len(arr)) | ||||
| 	rmlist := ArrToSet(needles) | ||||
| @@ -479,3 +511,33 @@ func JoinString(arr []string, delimiter string) string { | ||||
|  | ||||
| 	return str | ||||
| } | ||||
|  | ||||
// ArrChunk splits the array into buckets of max-size `chunkSize`
// order is being kept.
// The last chunk may contain less than length elements.
//
// (chunkSize <= 0 means no chunking; the documented sentinel is -1)
//
// see https://www.php.net/manual/en/function.array-chunk.php
func ArrChunk[T any](arr []T, chunkSize int) [][]T {
	// Treat every non-positive size as "no chunking". The old code only
	// special-cased -1, so chunkSize == 0 panicked with a division by zero in
	// the capacity computation and other negatives panicked in make().
	if chunkSize <= 0 {
		return [][]T{arr}
	}

	// Ceiling division gives the exact chunk count.
	res := make([][]T, 0, (len(arr)+chunkSize-1)/chunkSize)

	for i := 0; i < len(arr); i += chunkSize {
		right := i + chunkSize
		if right > len(arr) {
			right = len(arr)
		}
		res = append(res, arr[i:right])
	}

	return res
}
|   | ||||
| @@ -5,12 +5,76 @@ import ( | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| func Coalesce[T any](v *T, def T) T { | ||||
| 	if v == nil { | ||||
| 		return def | ||||
| 	} else { | ||||
| 		return *v | ||||
// Coalesce dereferences v1 when it is non-nil, otherwise returns def.
func Coalesce[T any](v1 *T, def T) T {
	if v1 == nil {
		return def
	}
	return *v1
}
|  | ||||
// CoalesceOpt returns v1 when it is non-nil, otherwise v2 (which may itself be nil).
func CoalesceOpt[T any](v1 *T, v2 *T) *T {
	if v1 == nil {
		return v2
	}
	return v1
}
|  | ||||
// Coalesce3 dereferences the first non-nil of (v1, v2); when both are nil it
// falls back to def.
func Coalesce3[T any](v1 *T, v2 *T, def T) T {
	for _, p := range []*T{v1, v2} {
		if p != nil {
			return *p
		}
	}
	return def
}
|  | ||||
// Coalesce3Opt returns the first non-nil of (v1, v2), otherwise v3
// (which may itself be nil).
func Coalesce3Opt[T any](v1 *T, v2 *T, v3 *T) *T {
	for _, p := range []*T{v1, v2} {
		if p != nil {
			return p
		}
	}
	return v3
}
|  | ||||
// Coalesce4 dereferences the first non-nil of (v1, v2, v3); when all three are
// nil it falls back to def.
func Coalesce4[T any](v1 *T, v2 *T, v3 *T, def T) T {
	for _, p := range []*T{v1, v2, v3} {
		if p != nil {
			return *p
		}
	}
	return def
}
|  | ||||
// Coalesce4Opt returns the first non-nil of (v1, v2, v3), otherwise v4
// (which may itself be nil).
func Coalesce4Opt[T any](v1 *T, v2 *T, v3 *T, v4 *T) *T {
	for _, p := range []*T{v1, v2, v3} {
		if p != nil {
			return p
		}
	}
	return v4
}
|  | ||||
| func CoalesceString(s *string, def string) string { | ||||
|   | ||||
| @@ -63,3 +63,51 @@ func PatchRemJson[JV string | []byte](rawjson JV, key string) (JV, error) { | ||||
|  | ||||
| 	return JV(newjson), nil | ||||
| } | ||||
|  | ||||
// MarshalJsonOrPanic serializes v to a JSON string; it panics with the
// marshalling error when the serialization fails.
func MarshalJsonOrPanic(v any) string {
	bin, err := json.Marshal(v)
	if err == nil {
		return string(bin)
	}
	panic(err)
}
|  | ||||
// MarshalJsonOrDefault serializes v to a JSON string, returning def when the
// serialization fails.
func MarshalJsonOrDefault(v any, def string) string {
	if bin, err := json.Marshal(v); err == nil {
		return string(bin)
	}
	return def
}
|  | ||||
// MarshalJsonOrNil serializes v to a JSON string, returning nil when the
// serialization fails.
func MarshalJsonOrNil(v any) *string {
	bin, err := json.Marshal(v)
	if err != nil {
		return nil
	}
	str := string(bin)
	return &str
}
|  | ||||
// MarshalJsonIndentOrPanic serializes v to an indented JSON string (see
// json.MarshalIndent for the prefix/indent semantics); it panics with the
// marshalling error when the serialization fails.
func MarshalJsonIndentOrPanic(v any, prefix, indent string) string {
	bin, err := json.MarshalIndent(v, prefix, indent)
	if err == nil {
		return string(bin)
	}
	panic(err)
}
|  | ||||
// MarshalJsonIndentOrDefault serializes v to an indented JSON string (see
// json.MarshalIndent for the prefix/indent semantics), returning def when the
// serialization fails.
func MarshalJsonIndentOrDefault(v any, prefix, indent string, def string) string {
	if bin, err := json.MarshalIndent(v, prefix, indent); err == nil {
		return string(bin)
	}
	return def
}
|  | ||||
// MarshalJsonIndentOrNil serializes v to an indented JSON string (see
// json.MarshalIndent for the prefix/indent semantics), returning nil when the
// serialization fails.
func MarshalJsonIndentOrNil(v any, prefix, indent string) *string {
	bin, err := json.MarshalIndent(v, prefix, indent)
	if err != nil {
		return nil
	}
	str := string(bin)
	return &str
}
|   | ||||
| @@ -71,3 +71,19 @@ func ForceMap[K comparable, V any](v map[K]V) map[K]V { | ||||
| 		return v | ||||
| 	} | ||||
| } | ||||
|  | ||||
// MapMerge returns a new map containing every entry of base and of all maps in
// arr. On duplicate keys the later map wins (arr overrides base; within arr the
// last occurrence wins). None of the input maps is modified.
func MapMerge[K comparable, V any](base map[K]V, arr ...map[K]V) map[K]V {
	// Pre-size with the exact total entry count. The previous heuristic
	// len(base)*(1+len(arr)) under-allocated whenever the extra maps were
	// larger than base (and over-allocated otherwise).
	n := len(base)
	for _, m := range arr {
		n += len(m)
	}

	res := make(map[K]V, n)

	for k, v := range base {
		res[k] = v
	}

	for _, m := range arr {
		for k, v := range m {
			res[k] = v
		}
	}

	return res
}
|   | ||||
							
								
								
									
										21
									
								
								langext/must.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										21
									
								
								langext/must.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,21 @@ | ||||
| package langext | ||||
|  | ||||
// Must returns a value and panics on error
//
// Usage: Must(methodWithError(...))
func Must[T any](v T, err error) T {
	if err == nil {
		return v
	}
	panic(err)
}
|  | ||||
// MustBool returns a value and panics on missing
//
// Usage: MustBool(methodWithOkayReturn(...))
func MustBool[T any](v T, ok bool) T {
	if ok {
		return v
	}
	panic("not ok")
}
							
								
								
									
										19
									
								
								langext/object.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										19
									
								
								langext/object.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,19 @@ | ||||
| package langext | ||||
|  | ||||
| import "encoding/json" | ||||
|  | ||||
// DeepCopyByJson creates a deep copy of v by round-tripping it through JSON.
// Only data that survives JSON serialization (exported, json-visible fields)
// is copied; on any (de)serialization error the zero value of T is returned
// together with the error.
func DeepCopyByJson[T any](v T) (T, error) {
	var result T

	bin, err := json.Marshal(v)
	if err != nil {
		return result, err
	}

	if err := json.Unmarshal(bin, &result); err != nil {
		// Return a fresh zero value: result may have been partially filled.
		var zero T
		return zero, err
	}

	return result, nil
}
| @@ -27,6 +27,10 @@ func DblPtrNil[T any]() **T { | ||||
| 	return &v | ||||
| } | ||||
|  | ||||
// ArrPtr collects its variadic arguments into a slice and returns a pointer to
// that slice (convenient for filling *[]T struct fields inline).
func ArrPtr[T any](v ...T) *[]T {
	return &v
}
|  | ||||
| func PtrInt32(v int32) *int32 { | ||||
| 	return &v | ||||
| } | ||||
|   | ||||
| @@ -8,12 +8,28 @@ func Sort[T OrderedConstraint](arr []T) { | ||||
| 	}) | ||||
| } | ||||
|  | ||||
| func AsSorted[T OrderedConstraint](arr []T) []T { | ||||
| 	arr = ArrCopy(arr) | ||||
| 	sort.Slice(arr, func(i1, i2 int) bool { | ||||
| 		return arr[i1] < arr[i2] | ||||
| 	}) | ||||
| 	return arr | ||||
| } | ||||
|  | ||||
| func SortStable[T OrderedConstraint](arr []T) { | ||||
| 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||
| 		return arr[i1] < arr[i2] | ||||
| 	}) | ||||
| } | ||||
|  | ||||
| func AsSortedStable[T OrderedConstraint](arr []T) []T { | ||||
| 	arr = ArrCopy(arr) | ||||
| 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||
| 		return arr[i1] < arr[i2] | ||||
| 	}) | ||||
| 	return arr | ||||
| } | ||||
|  | ||||
| func IsSorted[T OrderedConstraint](arr []T) bool { | ||||
| 	return sort.SliceIsSorted(arr, func(i1, i2 int) bool { | ||||
| 		return arr[i1] < arr[i2] | ||||
| @@ -26,12 +42,28 @@ func SortSlice[T any](arr []T, less func(v1, v2 T) bool) { | ||||
| 	}) | ||||
| } | ||||
|  | ||||
| func AsSortedSlice[T any](arr []T, less func(v1, v2 T) bool) []T { | ||||
| 	arr = ArrCopy(arr) | ||||
| 	sort.Slice(arr, func(i1, i2 int) bool { | ||||
| 		return less(arr[i1], arr[i2]) | ||||
| 	}) | ||||
| 	return arr | ||||
| } | ||||
|  | ||||
// SortSliceStable sorts arr in place with the given comparator, stably.
func SortSliceStable[T any](arr []T, less func(v1, v2 T) bool) {
	byIndex := func(i, j int) bool { return less(arr[i], arr[j]) }
	sort.SliceStable(arr, byIndex)
}
|  | ||||
| func AsSortedSliceStable[T any](arr []T, less func(v1, v2 T) bool) []T { | ||||
| 	arr = ArrCopy(arr) | ||||
| 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||
| 		return less(arr[i1], arr[i2]) | ||||
| 	}) | ||||
| 	return arr | ||||
| } | ||||
|  | ||||
| func IsSliceSorted[T any](arr []T, less func(v1, v2 T) bool) bool { | ||||
| 	return sort.SliceIsSorted(arr, func(i1, i2 int) bool { | ||||
| 		return less(arr[i1], arr[i2]) | ||||
| @@ -44,12 +76,28 @@ func SortBy[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TEle | ||||
| 	}) | ||||
| } | ||||
|  | ||||
| func AsSortedBy[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) []TElem { | ||||
| 	arr = ArrCopy(arr) | ||||
| 	sort.Slice(arr, func(i1, i2 int) bool { | ||||
| 		return selector(arr[i1]) < selector(arr[i2]) | ||||
| 	}) | ||||
| 	return arr | ||||
| } | ||||
|  | ||||
| func SortByStable[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) { | ||||
| 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||
| 		return selector(arr[i1]) < selector(arr[i2]) | ||||
| 	}) | ||||
| } | ||||
|  | ||||
| func AsSortedByStable[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) []TElem { | ||||
| 	arr = ArrCopy(arr) | ||||
| 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||
| 		return selector(arr[i1]) < selector(arr[i2]) | ||||
| 	}) | ||||
| 	return arr | ||||
| } | ||||
|  | ||||
| func IsSortedBy[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) { | ||||
| 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||
| 		return selector(arr[i1]) < selector(arr[i2]) | ||||
|   | ||||
							
								
								
									
										29
									
								
								langext/url.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										29
									
								
								langext/url.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,29 @@ | ||||
| package langext | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
// BuildUrl joins a base url and a path with exactly one slash between them
// and appends the given query parameters (if any).
//
// Fix: the original indexed path[:1] and url[len(url)-1:] directly, which
// panics when either string is empty; HasPrefix/HasSuffix are safe on "".
//
// NOTE(review): keys/values are appended verbatim, not url-escaped —
// confirm callers only pass query-safe strings.
func BuildUrl(url, path string, params *map[string]string) string {
	urlEndsSlash := strings.HasSuffix(url, "/")
	pathStartsSlash := strings.HasPrefix(path, "/")

	switch {
	case urlEndsSlash && pathStartsSlash:
		url += path[1:] // both sides have a slash -> drop one
	case !urlEndsSlash && !pathStartsSlash:
		url += "/" + path // neither side has a slash -> insert one
	default:
		url += path
	}

	if params == nil {
		return url
	}

	for key, value := range *params {
		if strings.Contains(url, "?") {
			url += fmt.Sprintf("&%s=%s", key, value)
		} else {
			url += fmt.Sprintf("?%s=%s", key, value)
		}
	}
	return url
}
							
								
								
									
										45
									
								
								langext/url_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										45
									
								
								langext/url_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,45 @@ | ||||
| package langext | ||||
|  | ||||
| import ( | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||
| 	"testing" | ||||
| ) | ||||
|  | ||||
// TestBuildUrl checks url/path joining (with and without slashes on either
// side) and that query parameters are appended when given.
func TestBuildUrl(t *testing.T) {
	// table-driven: every slash combination, with and without params
	tests := []struct {
		Url    string
		Path   string
		Params *map[string]string
		Want   string
	}{
		{
			Url:    "https://test.heydyno.de/",
			Path:   "/testing-01",
			Params: &map[string]string{"param1": "value1"},
			Want:   "https://test.heydyno.de/testing-01?param1=value1",
		},
		{
			Url:    "https://test.heydyno.de",
			Path:   "testing-01",
			Params: &map[string]string{"param1": "value1"},
			Want:   "https://test.heydyno.de/testing-01?param1=value1",
		},
		{
			Url:    "https://test.heydyno.de",
			Path:   "/testing-01",
			Params: nil,
			Want:   "https://test.heydyno.de/testing-01",
		},
		{
			Url:    "https://test.heydyno.de/",
			Path:   "testing-01",
			Params: nil,
			Want:   "https://test.heydyno.de/testing-01",
		},
	}

	for _, test := range tests {
		res := BuildUrl(test.Url, test.Path, test.Params)
		tst.AssertEqual(t, res, test.Want)
	}
}
| @@ -5,7 +5,7 @@ import ( | ||||
| 	"go.mongodb.org/mongo-driver/mongo" | ||||
| ) | ||||
|  | ||||
| type Filter interface { | ||||
// MongoFilter describes a mongo query: an aggregation pipeline used for
// filtering plus the sort order to apply to the results.
type MongoFilter interface {
	FilterQuery() mongo.Pipeline
	Sort() bson.D
}
| @@ -23,6 +23,6 @@ func (d dynamicFilter) Sort() bson.D { | ||||
| 	return d.sort | ||||
| } | ||||
|  | ||||
| func CreateFilter(pipeline mongo.Pipeline, sort bson.D) Filter { | ||||
// CreateFilter wraps a fixed pipeline and sort order into a MongoFilter.
func CreateFilter(pipeline mongo.Pipeline, sort bson.D) MongoFilter {
	return dynamicFilter{pipeline: pipeline, sort: sort}
}
|   | ||||
							
								
								
									
										102
									
								
								reflectext/convertToMap.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										102
									
								
								reflectext/convertToMap.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,102 @@ | ||||
| package reflectext | ||||
|  | ||||
| import ( | ||||
| 	"encoding/json" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"reflect" | ||||
| ) | ||||
|  | ||||
// ConvertStructToMapOpt configures ConvertStructToMap.
type ConvertStructToMapOpt struct {
	// KeepJsonMarshalTypes keeps struct values that implement json.Marshaler
	// as-is instead of recursing into their fields (e.g. time.Time).
	KeepJsonMarshalTypes bool
	// MaxDepth, when set, stops recursion below that depth and returns the
	// remaining values unconverted.
	MaxDepth *int
}
|  | ||||
// ConvertStructToMap converts a struct value (or pointer chain ending in one)
// into a map[string]any of its exported fields, recursing into nested
// structs, slices and arrays via reflectToMap.
//
// Returns nil when v resolves to nil; panics when v is not struct-shaped.
func ConvertStructToMap(v any, opts ...ConvertStructToMapOpt) map[string]any {
	// optional trailing options value — only the first one is honored
	opt := ConvertStructToMapOpt{}
	if len(opts) > 0 {
		opt = opts[0]
	}

	res := reflectToMap(reflect.ValueOf(v), 1, opt)

	if v, ok := res.(map[string]any); ok {
		return v
	} else if langext.IsNil(res) {
		return nil
	} else {
		panic("not an object")
	}
}
|  | ||||
| func reflectToMap(fv reflect.Value, depth int, opt ConvertStructToMapOpt) any { | ||||
|  | ||||
| 	if opt.MaxDepth != nil && depth > *opt.MaxDepth { | ||||
| 		return fv.Interface() | ||||
| 	} | ||||
|  | ||||
| 	if fv.Kind() == reflect.Ptr { | ||||
|  | ||||
| 		if fv.IsNil() { | ||||
| 			return nil | ||||
| 		} else { | ||||
| 			return reflectToMap(fv.Elem(), depth, opt) | ||||
| 		} | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	if fv.Kind() == reflect.Func { | ||||
|  | ||||
| 		// skip | ||||
| 		return nil | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	if fv.Kind() == reflect.Array { | ||||
|  | ||||
| 		arrlen := fv.Len() | ||||
| 		arr := make([]any, arrlen) | ||||
| 		for i := 0; i < arrlen; i++ { | ||||
| 			arr[i] = reflectToMap(fv.Index(i), depth+1, opt) | ||||
| 		} | ||||
| 		return arr | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	if fv.Kind() == reflect.Slice { | ||||
|  | ||||
| 		arrlen := fv.Len() | ||||
| 		arr := make([]any, arrlen) | ||||
| 		for i := 0; i < arrlen; i++ { | ||||
| 			arr[i] = reflectToMap(fv.Index(i), depth+1, opt) | ||||
| 		} | ||||
| 		return arr | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	if fv.Kind() == reflect.Chan { | ||||
|  | ||||
| 		// skip | ||||
| 		return nil | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	if fv.Kind() == reflect.Struct { | ||||
|  | ||||
| 		if opt.KeepJsonMarshalTypes && fv.Type().Implements(reflect.TypeFor[json.Marshaler]()) { | ||||
| 			return fv.Interface() | ||||
| 		} | ||||
|  | ||||
| 		res := make(map[string]any) | ||||
|  | ||||
| 		for i := 0; i < fv.NumField(); i++ { | ||||
| 			if fv.Type().Field(i).IsExported() { | ||||
| 				res[fv.Type().Field(i).Name] = reflectToMap(fv.Field(i), depth+1, opt) | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		return res | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	return fv.Interface() | ||||
| } | ||||
							
								
								
									
										42
									
								
								reflectext/convertToMap_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										42
									
								
								reflectext/convertToMap_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,42 @@ | ||||
| package reflectext | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"testing" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| func TestConvertStructToMap(t *testing.T) { | ||||
|  | ||||
| 	type tst struct { | ||||
| 		FieldA  int | ||||
| 		FieldB  string | ||||
| 		FieldC  time.Time | ||||
| 		FieldD  []float64 | ||||
| 		FieldE1 *int | ||||
| 		FieldE2 **int | ||||
| 		FieldE3 *int | ||||
| 		FieldE4 **int | ||||
| 		FieldE5 *int | ||||
| 		FieldE6 **int | ||||
| 	} | ||||
|  | ||||
| 	value := tst{ | ||||
| 		FieldA:  123, | ||||
| 		FieldB:  "hello", | ||||
| 		FieldC:  time.Date(2020, 05, 12, 8, 30, 0, 0, time.UTC), | ||||
| 		FieldD:  []float64{1, 2, 3, 4, 5, 6, 7}, | ||||
| 		FieldE1: nil, | ||||
| 		FieldE2: nil, | ||||
| 		FieldE3: langext.Ptr(12), | ||||
| 		FieldE4: langext.DblPtr(12), | ||||
| 		FieldE5: nil, | ||||
| 		FieldE6: langext.DblPtrNil[int](), | ||||
| 	} | ||||
|  | ||||
| 	valueOut := ConvertStructToMap(value, ConvertStructToMapOpt{KeepJsonMarshalTypes: true}) | ||||
|  | ||||
| 	fmt.Printf("%+v\n", valueOut) | ||||
|  | ||||
| } | ||||
							
								
								
									
										98
									
								
								reflectext/mapAccess.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										98
									
								
								reflectext/mapAccess.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,98 @@ | ||||
| package reflectext | ||||
|  | ||||
| import ( | ||||
| 	"reflect" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| // GetMapPath returns the value deep inside a hierahically nested map structure | ||||
| // eg: | ||||
| // x := langext.H{"K1": langext.H{"K2": 665}} | ||||
| // GetMapPath[int](x, "K1.K2") == 665 | ||||
| func GetMapPath[TData any](mapval any, path string) (TData, bool) { | ||||
| 	var ok bool | ||||
|  | ||||
| 	split := strings.Split(path, ".") | ||||
|  | ||||
| 	for i, key := range split { | ||||
|  | ||||
| 		if i < len(split)-1 { | ||||
| 			mapval, ok = GetMapField[any](mapval, key) | ||||
| 			if !ok { | ||||
| 				return *new(TData), false | ||||
| 			} | ||||
| 		} else { | ||||
| 			return GetMapField[TData](mapval, key) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return *new(TData), false | ||||
| } | ||||
|  | ||||
// GetMapField gets the value of a map, without knowing the actual types (mapval is any)
// eg:
// x := langext.H{"K1": 665}
// GetMapField[int](x, "K1") == 665
//
// works with aliased types and autom. dereferences pointers
func GetMapField[TData any, TKey comparable](mapval any, key TKey) (TData, bool) {

	rval := reflect.ValueOf(mapval)

	// unwrap pointer(s)-to-map, e.g. *map[string]int
	for rval.Kind() == reflect.Ptr && !rval.IsNil() {
		rval = rval.Elem()
	}

	if rval.Kind() != reflect.Map {
		return *new(TData), false // mapval is not a map
	}

	kval := reflect.ValueOf(key)

	if !kval.Type().AssignableTo(rval.Type().Key()) {
		return *new(TData), false // key cannot index mapval
	}

	eval := rval.MapIndex(kval)
	if !eval.IsValid() {
		return *new(TData), false // key does not exist in mapval
	}

	destType := reflect.TypeOf(new(TData)).Elem()

	// exact type match wins
	if eval.Type() == destType {
		return eval.Interface().(TData), true
	}

	// aliased / convertible types (but never string <-> non-string,
	// see preventConvert)
	if eval.CanConvert(destType) && !preventConvert(eval.Type(), destType) {
		return eval.Convert(destType).Interface().(TData), true
	}

	if (eval.Kind() == reflect.Ptr || eval.Kind() == reflect.Interface) && eval.IsNil() && destType.Kind() == reflect.Ptr {
		return *new(TData), false // special case: mapval[key] is nil
	}

	// peel pointers/interfaces one level at a time, re-trying the exact
	// and convertible matches after each dereference
	for (eval.Kind() == reflect.Ptr || eval.Kind() == reflect.Interface) && !eval.IsNil() {
		eval = eval.Elem()

		if eval.Type() == destType {
			return eval.Interface().(TData), true
		}

		if eval.CanConvert(destType) && !preventConvert(eval.Type(), destType) {
			return eval.Convert(destType).Interface().(TData), true
		}
	}

	return *new(TData), false // mapval[key] is not of type TData
}
|  | ||||
| func preventConvert(t1 reflect.Type, t2 reflect.Type) bool { | ||||
| 	if t1.Kind() == reflect.String && t1.Kind() != reflect.String { | ||||
| 		return true | ||||
| 	} | ||||
| 	if t2.Kind() == reflect.String && t1.Kind() != reflect.String { | ||||
| 		return true | ||||
| 	} | ||||
| 	return false | ||||
| } | ||||
							
								
								
									
										49
									
								
								reflectext/mapAccess_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										49
									
								
								reflectext/mapAccess_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,49 @@ | ||||
| package reflectext | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||
| 	"testing" | ||||
| ) | ||||
|  | ||||
// TestGetMapPath checks nested dotted-path lookups: hits, misses at either
// level, wrong target types, and aliased integer target types.
func TestGetMapPath(t *testing.T) {
	type PseudoInt = int64

	mymap2 := map[string]map[string]any{"Test": {"Second": 3}}

	var maany2 any = mymap2

	// results are compared as "value ok" strings to cover both returns at once
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[int](maany2, "Test.Second")), "3 true")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[int](maany2, "Test2.Second")), "0 false")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[int](maany2, "Test.Second2")), "0 false")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[string](maany2, "Test.Second")), "false")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[string](maany2, "Test2.Second")), "false")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[string](maany2, "Test.Second2")), "false")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[PseudoInt](maany2, "Test.Second")), "3 true")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[PseudoInt](maany2, "Test2.Second")), "0 false")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[PseudoInt](maany2, "Test.Second2")), "0 false")
}
|  | ||||
// TestGetMapField checks single-key lookups against both a map[string]any
// and a concretely-typed map[string]int: hits, missing keys, mismatched
// target types, and aliased integer target types.
func TestGetMapField(t *testing.T) {
	type PseudoInt = int64

	mymap1 := map[string]any{"Test": 12}
	mymap2 := map[string]int{"Test": 12}

	var maany1 any = mymap1
	var maany2 any = mymap2

	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany1, "Test")), "12 true")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany1, "Test2")), "0 false")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany1, "Test")), "false")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany1, "Test2")), "false")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany1, "Test")), "12 true")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany1, "Test2")), "0 false")

	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany2, "Test")), "12 true")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany2, "Test2")), "0 false")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany2, "Test")), "false")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany2, "Test2")), "false")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test")), "12 true")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test2")), "0 false")
}
| @@ -9,6 +9,8 @@ import ( | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||
| 	"reflect" | ||||
| 	"strconv" | ||||
| 	"strings" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| @@ -65,36 +67,20 @@ func (t *Date) UnmarshalJSON(data []byte) error { | ||||
| 	if err := json.Unmarshal(data, &str); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	t0, err := time.Parse(t.FormatStr(), str) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	t.Year = t0.Year() | ||||
| 	t.Month = int(t0.Month()) | ||||
| 	t.Day = t0.Day() | ||||
| 	return nil | ||||
| 	return t.ParseString(str) | ||||
| } | ||||
|  | ||||
| func (t Date) MarshalJSON() ([]byte, error) { | ||||
| 	str := t.TimeUTC().Format(t.FormatStr()) | ||||
| 	str := t.String() | ||||
| 	return json.Marshal(str) | ||||
| } | ||||
|  | ||||
| func (t Date) MarshalText() ([]byte, error) { | ||||
| 	b := make([]byte, 0, len(t.FormatStr())) | ||||
| 	return t.TimeUTC().AppendFormat(b, t.FormatStr()), nil | ||||
| 	return []byte(t.String()), nil | ||||
| } | ||||
|  | ||||
| func (t *Date) UnmarshalText(data []byte) error { | ||||
| 	var err error | ||||
| 	v, err := time.Parse(t.FormatStr(), string(data)) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	t.Year = v.Year() | ||||
| 	t.Month = int(v.Month()) | ||||
| 	t.Day = v.Day() | ||||
| 	return nil | ||||
| 	return t.ParseString(string(data)) | ||||
| } | ||||
|  | ||||
| func (t *Date) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||
| @@ -116,6 +102,13 @@ func (t *Date) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if tt == "" { | ||||
| 		t.Year = 0 | ||||
| 		t.Month = 0 | ||||
| 		t.Day = 0 | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	v, err := time.Parse(t.FormatStr(), tt) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| @@ -128,7 +121,10 @@ func (t *Date) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||
| } | ||||
|  | ||||
| func (t Date) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||
| 	return bson.MarshalValue(t.TimeUTC().Format(t.FormatStr())) | ||||
| 	if t.IsZero() { | ||||
| 		return bson.MarshalValue("") | ||||
| 	} | ||||
| 	return bson.MarshalValue(t.String()) | ||||
| } | ||||
|  | ||||
| func (t Date) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { | ||||
| @@ -164,7 +160,7 @@ func (t Date) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val | ||||
| } | ||||
|  | ||||
| func (t Date) Serialize() string { | ||||
| 	return t.TimeUTC().Format(t.FormatStr()) | ||||
| 	return t.String() | ||||
| } | ||||
|  | ||||
| func (t Date) FormatStr() string { | ||||
| @@ -212,11 +208,52 @@ func (t Date) Format(layout string) string { | ||||
| } | ||||
|  | ||||
| func (t Date) GoString() string { | ||||
| 	return t.TimeUTC().GoString() | ||||
| 	return fmt.Sprintf("rfctime.Date{Year: %d, Month: %d, Day: %d}", t.Year, t.Month, t.Day) | ||||
| } | ||||
|  | ||||
| func (t Date) String() string { | ||||
| 	return t.TimeUTC().String() | ||||
| 	return fmt.Sprintf("%04d-%02d-%02d", t.Year, t.Month, t.Day) | ||||
| } | ||||
|  | ||||
| func (t *Date) ParseString(v string) error { | ||||
| 	split := strings.Split(v, "-") | ||||
| 	if len(split) != 3 { | ||||
| 		return errors.New("invalid date format: " + v) | ||||
| 	} | ||||
| 	year, err := strconv.ParseInt(split[0], 10, 32) | ||||
| 	if err != nil { | ||||
| 		return errors.New("invalid date format: " + v + ": " + err.Error()) | ||||
| 	} | ||||
| 	month, err := strconv.ParseInt(split[1], 10, 32) | ||||
| 	if err != nil { | ||||
| 		return errors.New("invalid date format: " + v + ": " + err.Error()) | ||||
| 	} | ||||
| 	day, err := strconv.ParseInt(split[2], 10, 32) | ||||
| 	if err != nil { | ||||
| 		return errors.New("invalid date format: " + v + ": " + err.Error()) | ||||
| 	} | ||||
|  | ||||
| 	if year < 0 { | ||||
| 		return errors.New("invalid date format: " + v + ": year is negative") | ||||
| 	} | ||||
|  | ||||
| 	if month < 1 || month > 12 { | ||||
| 		return errors.New("invalid date format: " + v + ": month is out of range") | ||||
| 	} | ||||
|  | ||||
| 	if day < 1 || day > 31 { | ||||
| 		return errors.New("invalid date format: " + v + ": day is out of range") | ||||
| 	} | ||||
|  | ||||
| 	t.Year = int(year) | ||||
| 	t.Month = int(month) | ||||
| 	t.Day = int(day) | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
// IsZero reports whether the date is the zero value (all components 0).
func (t Date) IsZero() bool {
	return t.Year == 0 && t.Month == 0 && t.Day == 0
}
|  | ||||
| func NewDate(t time.Time) Date { | ||||
|   | ||||
| @@ -8,6 +8,7 @@ import ( | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"reflect" | ||||
| 	"time" | ||||
| ) | ||||
| @@ -245,6 +246,13 @@ func NewRFC3339(t time.Time) RFC3339Time { | ||||
| 	return RFC3339Time(t) | ||||
| } | ||||
|  | ||||
| func NewRFC3339Ptr(t *time.Time) *RFC3339Time { | ||||
| 	if t == nil { | ||||
| 		return nil | ||||
| 	} | ||||
| 	return langext.Ptr(RFC3339Time(*t)) | ||||
| } | ||||
|  | ||||
| func NowRFC3339() RFC3339Time { | ||||
| 	return RFC3339Time(time.Now()) | ||||
| } | ||||
|   | ||||
| @@ -8,6 +8,7 @@ import ( | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"reflect" | ||||
| 	"time" | ||||
| ) | ||||
| @@ -245,6 +246,13 @@ func NewRFC3339Nano(t time.Time) RFC3339NanoTime { | ||||
| 	return RFC3339NanoTime(t) | ||||
| } | ||||
|  | ||||
| func NewRFC3339NanoPtr(t *time.Time) *RFC3339NanoTime { | ||||
| 	if t == nil { | ||||
| 		return nil | ||||
| 	} | ||||
| 	return langext.Ptr(RFC3339NanoTime(*t)) | ||||
| } | ||||
|  | ||||
| func NowRFC3339Nano() RFC3339NanoTime { | ||||
| 	return RFC3339NanoTime(time.Now()) | ||||
| } | ||||
|   | ||||
							
								
								
									
										148
									
								
								rfctime/time.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										148
									
								
								rfctime/time.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,148 @@ | ||||
| package rfctime | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"strconv" | ||||
| 	"strings" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
// Time is a wall-clock time of day (no date, no timezone).
type Time struct {
	Hour       int // nominally 0..23 — not validated by Deserialize
	Minute     int // nominally 0..59
	Second     int // nominally 0..59
	NanoSecond int // nominally 0..999999999
}
|  | ||||
| func (t Time) Serialize() string { | ||||
| 	return fmt.Sprintf("%04d:%02d:%02d.%09d", t.Hour, t.Minute, t.Second, t.NanoSecond) | ||||
| } | ||||
|  | ||||
| func (t Time) SerializeShort() string { | ||||
| 	if t.NanoSecond == 0 && t.Second == 0 { | ||||
| 		return fmt.Sprintf("%02d:%02d", t.Hour, t.Minute) | ||||
| 	} else if t.NanoSecond == 0 { | ||||
| 		return fmt.Sprintf("%02d:%02d:%02d", t.Hour, t.Minute, t.Second) | ||||
| 	} else { | ||||
| 		return fmt.Sprintf("%02d:%02d:%02d.%09d", t.Hour, t.Minute, t.Second, t.NanoSecond) | ||||
| 	} | ||||
| } | ||||
|  | ||||
// Deserialize parses "HH:MM", "HH:MM:SS" or "HH:MM:SS.FFF..." into the
// receiver. The fractional part is right-padded with zeros to 9 digits
// (via langext.StrPadRight — presumably pads to the given total length;
// TODO confirm), so ".5" reads as 500ms. Component ranges are NOT validated.
func (t *Time) Deserialize(v string) error {

	var h, m, s, ns string

	split1 := strings.Split(v, ".")

	if len(split1) == 2 {

		// fractional part present -> the clock part must be H:M:S
		split2 := strings.Split(split1[0], ":")
		if len(split2) == 3 {

			h = split2[0]
			m = split2[1]
			s = split2[2]
			ns = split1[1]

		} else {
			return fmt.Errorf("invalid time format: %s", v)
		}

	} else if len(split1) == 1 {

		// no fractional part -> H:M (seconds default to 0) or H:M:S
		split2 := strings.Split(split1[0], ":")
		if len(split2) == 2 {

			h = split2[0]
			m = split2[1]
			s = "0"
			ns = "0"

		} else if len(split2) == 3 {

			h = split2[0]
			m = split2[1]
			s = split2[2]
			ns = "0"

		} else {
			return fmt.Errorf("invalid time format: %s", v)
		}

	} else {
		// more than one '.' in the input
		return fmt.Errorf("invalid time format: %s", v)
	}

	// scale the fraction to nanoseconds by padding to 9 digits
	ns = langext.StrPadRight(ns, "0", 9)

	hh, err := strconv.ParseInt(h, 10, 32)
	if err != nil {
		return fmt.Errorf("invalid time format: %s", v)
	}

	mm, err := strconv.ParseInt(m, 10, 32)
	if err != nil {
		return fmt.Errorf("invalid time format: %s", v)
	}

	ss, err := strconv.ParseInt(s, 10, 32)
	if err != nil {
		return fmt.Errorf("invalid time format: %s", v)
	}

	nss, err := strconv.ParseInt(ns, 10, 32)
	if err != nil {
		return fmt.Errorf("invalid time format: %s", v)
	}

	// only assign once everything parsed, so a failed parse leaves t untouched
	t.Hour = int(hh)
	t.Minute = int(mm)
	t.Second = int(ss)
	t.NanoSecond = int(nss)

	return nil
}
|  | ||||
// FormatStr returns the time.Parse/Format reference layout for this type.
func (t Time) FormatStr() string {
	return "15:04:05.999999999"
}
|  | ||||
// GoString returns a Go-syntax constructor expression for this value.
func (t Time) GoString() string {
	return fmt.Sprintf("rfctime.NewTime(%d, %d, %d, %d)", t.Hour, t.Minute, t.Second, t.NanoSecond)
}
|  | ||||
| func (t Time) String() string { | ||||
| 	return fmt.Sprintf("%04d:%02d:%02d.%09d", t.Hour, t.Minute, t.Second, t.NanoSecond) | ||||
| } | ||||
|  | ||||
| func NewTime(h int, m int, s int, ns int) Time { | ||||
| 	return Time{ | ||||
| 		Hour:       h, | ||||
| 		Minute:     m, | ||||
| 		Second:     s, | ||||
| 		NanoSecond: ns, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func NewTimeFromTS(t time.Time) Time { | ||||
| 	return Time{ | ||||
| 		Hour:       t.Hour(), | ||||
| 		Minute:     t.Minute(), | ||||
| 		Second:     t.Second(), | ||||
| 		NanoSecond: t.Nanosecond(), | ||||
| 	} | ||||
| } | ||||
|  | ||||
// NowTime returns the current wall-clock time of day in the given location.
func NowTime(loc *time.Location) Time {
	now := time.Now().In(loc)
	return NewTime(now.Hour(), now.Minute(), now.Second(), now.Nanosecond())
}
|  | ||||
| func NowTimeLoc() Time { | ||||
| 	return NowTime(time.UTC) | ||||
| } | ||||
|  | ||||
| func NowTimeUTC() Time { | ||||
| 	return NowTime(time.Local) | ||||
| } | ||||
| @@ -8,6 +8,7 @@ import ( | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"reflect" | ||||
| 	"strconv" | ||||
| 	"time" | ||||
| @@ -239,6 +240,13 @@ func NewUnix(t time.Time) UnixTime { | ||||
| 	return UnixTime(t) | ||||
| } | ||||
|  | ||||
| func NewUnixPtr(t *time.Time) *UnixTime { | ||||
| 	if t == nil { | ||||
| 		return nil | ||||
| 	} | ||||
| 	return langext.Ptr(UnixTime(*t)) | ||||
| } | ||||
|  | ||||
| func NowUnix() UnixTime { | ||||
| 	return UnixTime(time.Now()) | ||||
| } | ||||
|   | ||||
| @@ -8,6 +8,7 @@ import ( | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"reflect" | ||||
| 	"strconv" | ||||
| 	"time" | ||||
| @@ -239,6 +240,13 @@ func NewUnixMilli(t time.Time) UnixMilliTime { | ||||
| 	return UnixMilliTime(t) | ||||
| } | ||||
|  | ||||
| func NewUnixMilliPtr(t *time.Time) *UnixMilliTime { | ||||
| 	if t == nil { | ||||
| 		return nil | ||||
| 	} | ||||
| 	return langext.Ptr(UnixMilliTime(*t)) | ||||
| } | ||||
|  | ||||
| func NowUnixMilli() UnixMilliTime { | ||||
| 	return UnixMilliTime(time.Now()) | ||||
| } | ||||
|   | ||||
| @@ -8,6 +8,7 @@ import ( | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"reflect" | ||||
| 	"strconv" | ||||
| 	"time" | ||||
| @@ -239,6 +240,13 @@ func NewUnixNano(t time.Time) UnixNanoTime { | ||||
| 	return UnixNanoTime(t) | ||||
| } | ||||
|  | ||||
| func NewUnixNanoPtr(t *time.Time) *UnixNanoTime { | ||||
| 	if t == nil { | ||||
| 		return nil | ||||
| 	} | ||||
| 	return langext.Ptr(UnixNanoTime(*t)) | ||||
| } | ||||
|  | ||||
| func NowUnixNano() UnixNanoTime { | ||||
| 	return UnixNanoTime(time.Now()) | ||||
| } | ||||
|   | ||||
							
								
								
									
										132
									
								
								sq/builder.go
									
									
									
									
									
								
							
							
						
						
									
										132
									
								
								sq/builder.go
									
									
									
									
									
								
							| @@ -1,13 +1,14 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"errors" | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"reflect" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| func BuildUpdateStatement(q Queryable, tableName string, obj any, idColumn string) (string, PP, error) { | ||||
| func BuildUpdateStatement[TData any](q Queryable, tableName string, obj TData, idColumn string) (string, PP, error) { | ||||
| 	rval := reflect.ValueOf(obj) | ||||
| 	rtyp := rval.Type() | ||||
|  | ||||
| @@ -53,7 +54,7 @@ func BuildUpdateStatement(q Queryable, tableName string, obj any, idColumn strin | ||||
| 				return "", nil, err | ||||
| 			} | ||||
|  | ||||
| 			setClauses = append(setClauses, fmt.Sprintf("(%s = :%s)", columnName, params.Add(val))) | ||||
| 			setClauses = append(setClauses, fmt.Sprintf("%s = :%s", columnName, params.Add(val))) | ||||
|  | ||||
| 		} | ||||
| 	} | ||||
| @@ -69,3 +70,130 @@ func BuildUpdateStatement(q Queryable, tableName string, obj any, idColumn strin | ||||
| 	//goland:noinspection SqlNoDataSourceInspection | ||||
| 	return fmt.Sprintf("UPDATE %s SET %s WHERE %s", tableName, strings.Join(setClauses, ", "), matchClause), params, nil | ||||
| } | ||||
|  | ||||
| func BuildInsertStatement[TData any](q Queryable, tableName string, obj TData) (string, PP, error) { | ||||
| 	rval := reflect.ValueOf(obj) | ||||
| 	rtyp := rval.Type() | ||||
|  | ||||
| 	params := PP{} | ||||
|  | ||||
| 	fields := make([]string, 0) | ||||
| 	values := make([]string, 0) | ||||
|  | ||||
| 	for i := 0; i < rtyp.NumField(); i++ { | ||||
|  | ||||
| 		rsfield := rtyp.Field(i) | ||||
| 		rvfield := rval.Field(i) | ||||
|  | ||||
| 		if !rsfield.IsExported() { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		columnName := rsfield.Tag.Get("db") | ||||
| 		if columnName == "" || columnName == "-" { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		if rsfield.Type.Kind() == reflect.Ptr && rvfield.IsNil() { | ||||
|  | ||||
| 			fields = append(fields, columnName) | ||||
| 			values = append(values, "NULL") | ||||
|  | ||||
| 		} else { | ||||
|  | ||||
| 			val, err := convertValueToDB(q, rvfield.Interface()) | ||||
| 			if err != nil { | ||||
| 				return "", nil, err | ||||
| 			} | ||||
|  | ||||
| 			fields = append(fields, columnName) | ||||
| 			values = append(values, ":"+params.Add(val)) | ||||
|  | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if len(fields) == 0 { | ||||
| 		return "", nil, exerr.New(exerr.TypeSQLBuild, "no fields found in object").Build() | ||||
| 	} | ||||
|  | ||||
| 	//goland:noinspection SqlNoDataSourceInspection | ||||
| 	return fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", tableName, strings.Join(fields, ", "), strings.Join(values, ", ")), params, nil | ||||
| } | ||||
|  | ||||
| func BuildInsertMultipleStatement[TData any](q Queryable, tableName string, vArr []TData) (string, PP, error) { | ||||
|  | ||||
| 	if len(vArr) == 0 { | ||||
| 		return "", nil, errors.New("no data supplied") | ||||
| 	} | ||||
|  | ||||
| 	rtyp := reflect.ValueOf(vArr[0]).Type() | ||||
|  | ||||
| 	sqlPrefix := "" | ||||
| 	{ | ||||
| 		columns := make([]string, 0) | ||||
|  | ||||
| 		for i := 0; i < rtyp.NumField(); i++ { | ||||
| 			rsfield := rtyp.Field(i) | ||||
|  | ||||
| 			if !rsfield.IsExported() { | ||||
| 				continue | ||||
| 			} | ||||
|  | ||||
| 			columnName := rsfield.Tag.Get("db") | ||||
| 			if columnName == "" || columnName == "-" { | ||||
| 				continue | ||||
| 			} | ||||
|  | ||||
| 			columns = append(columns, "\""+columnName+"\"") | ||||
| 		} | ||||
|  | ||||
| 		sqlPrefix = fmt.Sprintf("INSERT"+" INTO \"%s\" (%s) VALUES", tableName, strings.Join(columns, ", ")) | ||||
| 	} | ||||
|  | ||||
| 	pp := PP{} | ||||
|  | ||||
| 	sqlValuesArr := make([]string, 0) | ||||
|  | ||||
| 	for _, v := range vArr { | ||||
|  | ||||
| 		rval := reflect.ValueOf(v) | ||||
|  | ||||
| 		params := make([]string, 0) | ||||
|  | ||||
| 		for i := 0; i < rtyp.NumField(); i++ { | ||||
|  | ||||
| 			rsfield := rtyp.Field(i) | ||||
| 			rvfield := rval.Field(i) | ||||
|  | ||||
| 			if !rsfield.IsExported() { | ||||
| 				continue | ||||
| 			} | ||||
|  | ||||
| 			columnName := rsfield.Tag.Get("db") | ||||
| 			if columnName == "" || columnName == "-" { | ||||
| 				continue | ||||
| 			} | ||||
|  | ||||
| 			if rsfield.Type.Kind() == reflect.Ptr && rvfield.IsNil() { | ||||
|  | ||||
| 				params = append(params, "NULL") | ||||
|  | ||||
| 			} else { | ||||
|  | ||||
| 				val, err := convertValueToDB(q, rvfield.Interface()) | ||||
| 				if err != nil { | ||||
| 					return "", nil, err | ||||
| 				} | ||||
|  | ||||
| 				params = append(params, ":"+pp.Add(val)) | ||||
|  | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		sqlValuesArr = append(sqlValuesArr, fmt.Sprintf("(%s)", strings.Join(params, ", "))) | ||||
| 	} | ||||
|  | ||||
| 	sqlstr := fmt.Sprintf("%s %s", sqlPrefix, strings.Join(sqlValuesArr, ", ")) | ||||
|  | ||||
| 	return sqlstr, pp, nil | ||||
| } | ||||
|   | ||||
| @@ -52,8 +52,7 @@ func TestCreateUpdateStatement(t *testing.T) { | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db.RegisterDefaultConverter() | ||||
| 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||
|  | ||||
| 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|   | ||||
							
								
								
									
										32
									
								
								sq/commentTrimmer.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										32
									
								
								sq/commentTrimmer.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,32 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| var CommentTrimmer = NewPreListener(fnTrimComments) | ||||
|  | ||||
| func fnTrimComments(ctx context.Context, cmdtype string, id *uint16, sql *string, params *PP) error { | ||||
|  | ||||
| 	res := make([]string, 0) | ||||
|  | ||||
| 	for _, s := range strings.Split(*sql, "\n") { | ||||
| 		if strings.HasPrefix(strings.TrimSpace(s), "--") { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		idx := strings.Index(s, "--") | ||||
| 		if idx != -1 { | ||||
| 			s = s[:idx] | ||||
| 		} | ||||
|  | ||||
| 		s = strings.TrimRight(s, " \t\r\n") | ||||
|  | ||||
| 		res = append(res, s) | ||||
| 	} | ||||
|  | ||||
| 	*sql = strings.Join(res, "\n") | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
							
								
								
									
										107
									
								
								sq/converter.go
									
									
									
									
									
								
							
							
						
						
									
										107
									
								
								sq/converter.go
									
									
									
									
									
								
							| @@ -1,13 +1,10 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"encoding/json" | ||||
| 	"errors" | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | ||||
| 	"reflect" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| type DBTypeConverter interface { | ||||
| @@ -17,95 +14,16 @@ type DBTypeConverter interface { | ||||
| 	DBToModel(v any) (any, error) | ||||
| } | ||||
|  | ||||
| var ConverterBoolToBit = NewDBTypeConverter[bool, int](func(v bool) (int, error) { | ||||
| 	return langext.Conditional(v, 1, 0), nil | ||||
| }, func(v int) (bool, error) { | ||||
| 	if v == 0 { | ||||
| 		return false, nil | ||||
| type DBDataConstraint interface { | ||||
| 	string | langext.NumberConstraint | []byte | ||||
| } | ||||
| 	if v == 1 { | ||||
| 		return true, nil | ||||
|  | ||||
| type DatabaseConvertible[TModelData any, TDBData DBDataConstraint] interface { | ||||
| 	MarshalToDB(v TModelData) (TDBData, error) | ||||
| 	UnmarshalToModel(v TDBData) (TModelData, error) | ||||
| } | ||||
| 	return false, errors.New(fmt.Sprintf("invalid valud for boolean: '%d'", v)) | ||||
| }) | ||||
|  | ||||
| var ConverterTimeToUnixMillis = NewDBTypeConverter[time.Time, int64](func(v time.Time) (int64, error) { | ||||
| 	return v.UnixMilli(), nil | ||||
| }, func(v int64) (time.Time, error) { | ||||
| 	return time.UnixMilli(v), nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCUnixMilliTimeToUnixMillis = NewDBTypeConverter[rfctime.UnixMilliTime, int64](func(v rfctime.UnixMilliTime) (int64, error) { | ||||
| 	return v.UnixMilli(), nil | ||||
| }, func(v int64) (rfctime.UnixMilliTime, error) { | ||||
| 	return rfctime.NewUnixMilli(time.UnixMilli(v)), nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCUnixNanoTimeToUnixNanos = NewDBTypeConverter[rfctime.UnixNanoTime, int64](func(v rfctime.UnixNanoTime) (int64, error) { | ||||
| 	return v.UnixNano(), nil | ||||
| }, func(v int64) (rfctime.UnixNanoTime, error) { | ||||
| 	return rfctime.NewUnixNano(time.Unix(0, v)), nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCUnixTimeToUnixSeconds = NewDBTypeConverter[rfctime.UnixTime, int64](func(v rfctime.UnixTime) (int64, error) { | ||||
| 	return v.Unix(), nil | ||||
| }, func(v int64) (rfctime.UnixTime, error) { | ||||
| 	return rfctime.NewUnix(time.Unix(v, 0)), nil | ||||
| }) | ||||
|  | ||||
| // ConverterRFC339TimeToString | ||||
| // Does not really use RFC339 - but sqlite does not understand timezones and the `T` delimiter | ||||
| var ConverterRFC339TimeToString = NewDBTypeConverter[rfctime.RFC3339Time, string](func(v rfctime.RFC3339Time) (string, error) { | ||||
| 	return v.Time().In(time.UTC).Format("2006-01-02 15:04:05"), nil | ||||
| }, func(v string) (rfctime.RFC3339Time, error) { | ||||
| 	t, err := time.Parse("2006-01-02 15:04:05", v) | ||||
| 	if err != nil { | ||||
| 		return rfctime.RFC3339Time{}, err | ||||
| 	} | ||||
| 	return rfctime.NewRFC3339(t), nil | ||||
| }) | ||||
|  | ||||
| // ConverterRFC339NanoTimeToString | ||||
| // Does not really use RFC339 - but sqlite does not understand timezones and the `T` delimiter | ||||
| var ConverterRFC339NanoTimeToString = NewDBTypeConverter[rfctime.RFC3339NanoTime, string](func(v rfctime.RFC3339NanoTime) (string, error) { | ||||
| 	return v.Time().In(time.UTC).Format("2006-01-02 15:04:05.999999999"), nil | ||||
| }, func(v string) (rfctime.RFC3339NanoTime, error) { | ||||
| 	t, err := time.ParseInLocation("2006-01-02 15:04:05.999999999", v, time.UTC) | ||||
| 	if err != nil { | ||||
| 		return rfctime.RFC3339NanoTime{}, err | ||||
| 	} | ||||
| 	return rfctime.NewRFC3339Nano(t), nil | ||||
| }) | ||||
|  | ||||
| var ConverterJsonObjToString = NewDBTypeConverter[JsonObj, string](func(v JsonObj) (string, error) { | ||||
| 	mrsh, err := json.Marshal(v) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
| 	return string(mrsh), nil | ||||
| }, func(v string) (JsonObj, error) { | ||||
| 	var mrsh JsonObj | ||||
| 	if err := json.Unmarshal([]byte(v), &mrsh); err != nil { | ||||
| 		return JsonObj{}, err | ||||
| 	} | ||||
| 	return mrsh, nil | ||||
| }) | ||||
|  | ||||
| var ConverterJsonArrToString = NewDBTypeConverter[JsonArr, string](func(v JsonArr) (string, error) { | ||||
| 	mrsh, err := json.Marshal(v) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
| 	return string(mrsh), nil | ||||
| }, func(v string) (JsonArr, error) { | ||||
| 	var mrsh JsonArr | ||||
| 	if err := json.Unmarshal([]byte(v), &mrsh); err != nil { | ||||
| 		return JsonArr{}, err | ||||
| 	} | ||||
| 	return mrsh, nil | ||||
| }) | ||||
|  | ||||
| type dbTypeConverterImpl[TModelData any, TDBData any] struct { | ||||
| type dbTypeConverterImpl[TModelData any, TDBData DBDataConstraint] struct { | ||||
| 	dbTypeString    string | ||||
| 	modelTypeString string | ||||
| 	todb            func(v TModelData) (TDBData, error) | ||||
| @@ -134,7 +52,7 @@ func (t *dbTypeConverterImpl[TModelData, TDBData]) DBToModel(v any) (any, error) | ||||
| 	return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.dbTypeString, v)) | ||||
| } | ||||
|  | ||||
| func NewDBTypeConverter[TModelData any, TDBData any](todb func(v TModelData) (TDBData, error), tomodel func(v TDBData) (TModelData, error)) DBTypeConverter { | ||||
| func NewDBTypeConverter[TModelData any, TDBData DBDataConstraint](todb func(v TModelData) (TDBData, error), tomodel func(v TDBData) (TModelData, error)) DBTypeConverter { | ||||
| 	return &dbTypeConverterImpl[TModelData, TDBData]{ | ||||
| 		dbTypeString:    fmt.Sprintf("%T", *new(TDBData)), | ||||
| 		modelTypeString: fmt.Sprintf("%T", *new(TModelData)), | ||||
| @@ -143,6 +61,15 @@ func NewDBTypeConverter[TModelData any, TDBData any](todb func(v TModelData) (TD | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func NewAutoDBTypeConverter[TDBData DBDataConstraint, TModelData DatabaseConvertible[TModelData, TDBData]](obj TModelData) DBTypeConverter { | ||||
| 	return &dbTypeConverterImpl[TModelData, TDBData]{ | ||||
| 		dbTypeString:    fmt.Sprintf("%T", *new(TDBData)), | ||||
| 		modelTypeString: fmt.Sprintf("%T", *new(TModelData)), | ||||
| 		todb:            obj.MarshalToDB, | ||||
| 		tomodel:         obj.UnmarshalToModel, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func convertValueToDB(q Queryable, value any) (any, error) { | ||||
| 	modelTypeStr := fmt.Sprintf("%T", value) | ||||
|  | ||||
|   | ||||
							
								
								
									
										147
									
								
								sq/converterDefault.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										147
									
								
								sq/converterDefault.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,147 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"errors" | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/timeext" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| // ========================== COMMON DATATYPES ========================== | ||||
|  | ||||
| var ConverterBoolToBit = NewDBTypeConverter[bool, int64](func(v bool) (int64, error) { | ||||
| 	return langext.Conditional(v, int64(1), int64(0)), nil | ||||
| }, func(v int64) (bool, error) { | ||||
| 	if v == 0 { | ||||
| 		return false, nil | ||||
| 	} | ||||
| 	if v == 1 { | ||||
| 		return true, nil | ||||
| 	} | ||||
| 	return false, errors.New(fmt.Sprintf("invalid valud for boolean: '%d'", v)) | ||||
| }) | ||||
|  | ||||
| var ConverterTimeToUnixMillis = NewDBTypeConverter[time.Time, int64](func(v time.Time) (int64, error) { | ||||
| 	return v.UnixMilli(), nil | ||||
| }, func(v int64) (time.Time, error) { | ||||
| 	return time.UnixMilli(v), nil | ||||
| }) | ||||
|  | ||||
| // ========================== RFCTIME ========================== | ||||
|  | ||||
| var ConverterRFCUnixMilliTimeToUnixMillis = NewDBTypeConverter[rfctime.UnixMilliTime, int64](func(v rfctime.UnixMilliTime) (int64, error) { | ||||
| 	return v.UnixMilli(), nil | ||||
| }, func(v int64) (rfctime.UnixMilliTime, error) { | ||||
| 	return rfctime.NewUnixMilli(time.UnixMilli(v)), nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCUnixNanoTimeToUnixNanos = NewDBTypeConverter[rfctime.UnixNanoTime, int64](func(v rfctime.UnixNanoTime) (int64, error) { | ||||
| 	return v.UnixNano(), nil | ||||
| }, func(v int64) (rfctime.UnixNanoTime, error) { | ||||
| 	return rfctime.NewUnixNano(time.Unix(0, v)), nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCUnixTimeToUnixSeconds = NewDBTypeConverter[rfctime.UnixTime, int64](func(v rfctime.UnixTime) (int64, error) { | ||||
| 	return v.Unix(), nil | ||||
| }, func(v int64) (rfctime.UnixTime, error) { | ||||
| 	return rfctime.NewUnix(time.Unix(v, 0)), nil | ||||
| }) | ||||
|  | ||||
| // ConverterRFC339TimeToString | ||||
| // Does not really use RFC339 - but sqlite does not understand timezones and the `T` delimiter | ||||
| var ConverterRFC339TimeToString = NewDBTypeConverter[rfctime.RFC3339Time, string](func(v rfctime.RFC3339Time) (string, error) { | ||||
| 	return v.Time().In(time.UTC).Format("2006-01-02 15:04:05"), nil | ||||
| }, func(v string) (rfctime.RFC3339Time, error) { | ||||
| 	t, err := time.Parse("2006-01-02 15:04:05", v) | ||||
| 	if err != nil { | ||||
| 		return rfctime.RFC3339Time{}, err | ||||
| 	} | ||||
| 	return rfctime.NewRFC3339(t), nil | ||||
| }) | ||||
|  | ||||
| // ConverterRFC339NanoTimeToString | ||||
| // Does not really use RFC339 - but sqlite does not understand timezones and the `T` delimiter | ||||
| var ConverterRFC339NanoTimeToString = NewDBTypeConverter[rfctime.RFC3339NanoTime, string](func(v rfctime.RFC3339NanoTime) (string, error) { | ||||
| 	return v.Time().In(time.UTC).Format("2006-01-02 15:04:05.999999999"), nil | ||||
| }, func(v string) (rfctime.RFC3339NanoTime, error) { | ||||
| 	t, err := time.ParseInLocation("2006-01-02 15:04:05.999999999", v, time.UTC) | ||||
| 	if err != nil { | ||||
| 		return rfctime.RFC3339NanoTime{}, err | ||||
| 	} | ||||
| 	return rfctime.NewRFC3339Nano(t), nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCDateToString = NewDBTypeConverter[rfctime.Date, string](func(v rfctime.Date) (string, error) { | ||||
| 	return fmt.Sprintf("%04d-%02d-%02d", v.Year, v.Month, v.Day), nil | ||||
| }, func(v string) (rfctime.Date, error) { | ||||
| 	d := rfctime.Date{} | ||||
| 	if err := d.ParseString(v); err != nil { | ||||
| 		return rfctime.Date{}, err | ||||
| 	} else { | ||||
| 		return d, nil | ||||
| 	} | ||||
| }) | ||||
|  | ||||
| var ConverterRFCTimeToString = NewDBTypeConverter[rfctime.Time, string](func(v rfctime.Time) (string, error) { | ||||
| 	return v.SerializeShort(), nil | ||||
| }, func(v string) (rfctime.Time, error) { | ||||
| 	res := rfctime.Time{} | ||||
| 	err := res.Deserialize(v) | ||||
| 	if err != nil { | ||||
| 		return rfctime.Time{}, err | ||||
| 	} | ||||
| 	return res, nil | ||||
| }) | ||||
|  | ||||
| var ConverterRFCSecondsF64ToString = NewDBTypeConverter[rfctime.SecondsF64, float64](func(v rfctime.SecondsF64) (float64, error) { | ||||
| 	return v.Seconds(), nil | ||||
| }, func(v float64) (rfctime.SecondsF64, error) { | ||||
| 	return rfctime.NewSecondsF64(timeext.FromSeconds(v)), nil | ||||
| }) | ||||
|  | ||||
| // ========================== JSON ========================== | ||||
|  | ||||
| var ConverterJsonObjToString = NewAutoDBTypeConverter(JsonObj{}) | ||||
|  | ||||
| var ConverterJsonArrToString = NewAutoDBTypeConverter(JsonArr{}) | ||||
|  | ||||
| // Json[T] must be registered manually for each gen-type | ||||
|  | ||||
| // ========================== EXERR ========================== | ||||
|  | ||||
| var ConverterExErrCategoryToString = NewDBTypeConverter[exerr.ErrorCategory, string](func(v exerr.ErrorCategory) (string, error) { | ||||
| 	return v.Category, nil | ||||
| }, func(v string) (exerr.ErrorCategory, error) { | ||||
| 	for _, cat := range exerr.AllCategories { | ||||
| 		if cat.Category == v { | ||||
| 			return cat, nil | ||||
| 		} | ||||
| 	} | ||||
| 	return exerr.CatUser, errors.New("failed to convert '" + v + "' to exerr.ErrorCategory") | ||||
| }) | ||||
|  | ||||
| var ConverterExErrSeverityToString = NewDBTypeConverter[exerr.ErrorSeverity, string](func(v exerr.ErrorSeverity) (string, error) { | ||||
| 	return v.Severity, nil | ||||
| }, func(v string) (exerr.ErrorSeverity, error) { | ||||
| 	for _, sev := range exerr.AllSeverities { | ||||
| 		if sev.Severity == v { | ||||
| 			return sev, nil | ||||
| 		} | ||||
| 	} | ||||
| 	return exerr.SevErr, errors.New("failed to convert '" + v + "' to exerr.ErrorSeverity") | ||||
| }) | ||||
|  | ||||
| var ConverterExErrTypeToString = NewDBTypeConverter[exerr.ErrorType, string](func(v exerr.ErrorType) (string, error) { | ||||
| 	return v.Key, nil | ||||
| }, func(v string) (exerr.ErrorType, error) { | ||||
| 	for _, etp := range exerr.ListRegisteredTypes() { | ||||
| 		if etp.Key == v { | ||||
| 			return etp, nil | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return exerr.NewType(v, nil), nil | ||||
| }) | ||||
| @@ -4,6 +4,7 @@ import ( | ||||
| 	"context" | ||||
| 	"database/sql" | ||||
| 	"github.com/jmoiron/sqlx" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"sync" | ||||
| ) | ||||
| @@ -16,7 +17,11 @@ type DB interface { | ||||
| 	AddListener(listener Listener) | ||||
| 	Exit() error | ||||
| 	RegisterConverter(DBTypeConverter) | ||||
| 	RegisterDefaultConverter() | ||||
| } | ||||
|  | ||||
| type DBOptions struct { | ||||
| 	RegisterDefaultConverter *bool | ||||
| 	RegisterCommentTrimmer   *bool | ||||
| } | ||||
|  | ||||
| type database struct { | ||||
| @@ -27,13 +32,23 @@ type database struct { | ||||
| 	conv  []DBTypeConverter | ||||
| } | ||||
|  | ||||
| func NewDB(db *sqlx.DB) DB { | ||||
| 	return &database{ | ||||
| func NewDB(db *sqlx.DB, opt DBOptions) DB { | ||||
| 	sqdb := &database{ | ||||
| 		db:    db, | ||||
| 		txctr: 0, | ||||
| 		lock:  sync.Mutex{}, | ||||
| 		lstr:  make([]Listener, 0), | ||||
| 	} | ||||
|  | ||||
| 	if langext.Coalesce(opt.RegisterDefaultConverter, true) { | ||||
| 		sqdb.registerDefaultConverter() | ||||
| 	} | ||||
|  | ||||
| 	if langext.Coalesce(opt.RegisterCommentTrimmer, true) { | ||||
| 		sqdb.AddListener(CommentTrimmer) | ||||
| 	} | ||||
|  | ||||
| 	return sqdb | ||||
| } | ||||
|  | ||||
| func (db *database) AddListener(listener Listener) { | ||||
| @@ -45,7 +60,7 @@ func (db *database) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Resul | ||||
| 	for _, v := range db.lstr { | ||||
| 		err := v.PreExec(ctx, nil, &sqlstr, &prep) | ||||
| 		if err != nil { | ||||
| 			return nil, err | ||||
| 			return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -56,7 +71,7 @@ func (db *database) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Resul | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 		return nil, exerr.Wrap(err, "Failed to [exec] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 	} | ||||
| 	return res, nil | ||||
| } | ||||
| @@ -66,7 +81,7 @@ func (db *database) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Ro | ||||
| 	for _, v := range db.lstr { | ||||
| 		err := v.PreQuery(ctx, nil, &sqlstr, &prep) | ||||
| 		if err != nil { | ||||
| 			return nil, err | ||||
| 			return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -77,7 +92,7 @@ func (db *database) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Ro | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 		return nil, exerr.Wrap(err, "Failed to [query] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 	} | ||||
| 	return rows, nil | ||||
| } | ||||
| @@ -97,7 +112,7 @@ func (db *database) Ping(ctx context.Context) error { | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 		return exerr.Wrap(err, "Failed to [ping] sql database").Build() | ||||
| 	} | ||||
| 	return nil | ||||
| } | ||||
| @@ -117,7 +132,7 @@ func (db *database) BeginTransaction(ctx context.Context, iso sql.IsolationLevel | ||||
|  | ||||
| 	xtx, err := db.db.BeginTxx(ctx, &sql.TxOptions{Isolation: iso}) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 		return nil, exerr.Wrap(err, "Failed to start sql transaction").Build() | ||||
| 	} | ||||
|  | ||||
| 	for _, v := range db.lstr { | ||||
| @@ -140,14 +155,24 @@ func (db *database) RegisterConverter(conv DBTypeConverter) { | ||||
| 	db.conv = append(db.conv, conv) | ||||
| } | ||||
|  | ||||
| func (db *database) RegisterDefaultConverter() { | ||||
| func (db *database) registerDefaultConverter() { | ||||
| 	db.RegisterConverter(ConverterBoolToBit) | ||||
|  | ||||
| 	db.RegisterConverter(ConverterTimeToUnixMillis) | ||||
|  | ||||
| 	db.RegisterConverter(ConverterRFCUnixMilliTimeToUnixMillis) | ||||
| 	db.RegisterConverter(ConverterRFCUnixNanoTimeToUnixNanos) | ||||
| 	db.RegisterConverter(ConverterRFCUnixTimeToUnixSeconds) | ||||
| 	db.RegisterConverter(ConverterRFC339TimeToString) | ||||
| 	db.RegisterConverter(ConverterRFC339NanoTimeToString) | ||||
| 	db.RegisterConverter(ConverterRFCDateToString) | ||||
| 	db.RegisterConverter(ConverterRFCTimeToString) | ||||
| 	db.RegisterConverter(ConverterRFCSecondsF64ToString) | ||||
|  | ||||
| 	db.RegisterConverter(ConverterJsonObjToString) | ||||
| 	db.RegisterConverter(ConverterJsonArrToString) | ||||
|  | ||||
| 	db.RegisterConverter(ConverterExErrCategoryToString) | ||||
| 	db.RegisterConverter(ConverterExErrSeverityToString) | ||||
| 	db.RegisterConverter(ConverterExErrTypeToString) | ||||
| } | ||||
|   | ||||
							
								
								
									
										56
									
								
								sq/filter.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										56
									
								
								sq/filter.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,56 @@ | ||||
| package sq | ||||
|  | ||||
| import ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | ||||
|  | ||||
| type FilterSort struct { | ||||
| 	Field     string | ||||
| 	Direction ct.SortDirection | ||||
| } | ||||
|  | ||||
| type PaginateFilter interface { | ||||
| 	SQL(params PP) (filterClause string, joinClause string, joinTables []string) | ||||
| 	Sort() []FilterSort | ||||
| } | ||||
|  | ||||
| type genericPaginateFilter struct { | ||||
| 	sql  func(params PP) (filterClause string, joinClause string, joinTables []string) | ||||
| 	sort func() []FilterSort | ||||
| } | ||||
|  | ||||
| func (g genericPaginateFilter) SQL(params PP) (filterClause string, joinClause string, joinTables []string) { | ||||
| 	return g.sql(params) | ||||
| } | ||||
|  | ||||
| func (g genericPaginateFilter) Sort() []FilterSort { | ||||
| 	return g.sort() | ||||
| } | ||||
|  | ||||
| func NewPaginateFilter(sql func(params PP) (filterClause string, joinClause string, joinTables []string), sort []FilterSort) PaginateFilter { | ||||
| 	return genericPaginateFilter{ | ||||
| 		sql: func(params PP) (filterClause string, joinClause string, joinTables []string) { | ||||
| 			return sql(params) | ||||
| 		}, | ||||
| 		sort: func() []FilterSort { | ||||
| 			return sort | ||||
| 		}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func NewSimplePaginateFilter(filterClause string, filterParams PP, sort []FilterSort) PaginateFilter { | ||||
| 	return genericPaginateFilter{ | ||||
| 		sql: func(params PP) (string, string, []string) { | ||||
| 			params.AddAll(filterParams) | ||||
| 			return filterClause, "", nil | ||||
| 		}, | ||||
| 		sort: func() []FilterSort { | ||||
| 			return sort | ||||
| 		}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func NewEmptyPaginateFilter() PaginateFilter { | ||||
| 	return genericPaginateFilter{ | ||||
| 		sql:  func(params PP) (string, string, []string) { return "1=1", "", nil }, | ||||
| 		sort: func() []FilterSort { return make([]FilterSort, 0) }, | ||||
| 	} | ||||
| } | ||||
| @@ -31,7 +31,7 @@ func HashMattnSqliteSchema(ctx context.Context, schemaStr string) (string, error | ||||
| 		return "", err | ||||
| 	} | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db := NewDB(xdb, DBOptions{}) | ||||
|  | ||||
| 	_, err = db.Exec(ctx, schemaStr, PP{}) | ||||
| 	if err != nil { | ||||
| @@ -59,7 +59,7 @@ func HashGoSqliteSchema(ctx context.Context, schemaStr string) (string, error) { | ||||
| 		return "", err | ||||
| 	} | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db := NewDB(xdb, DBOptions{}) | ||||
|  | ||||
| 	_, err = db.Exec(ctx, schemaStr, PP{}) | ||||
| 	if err != nil { | ||||
|   | ||||
							
								
								
									
										54
									
								
								sq/json.go
									
									
									
									
									
								
							
							
						
						
									
										54
									
								
								sq/json.go
									
									
									
									
									
								
							| @@ -1,5 +1,59 @@ | ||||
| package sq | ||||
|  | ||||
| import "encoding/json" | ||||
|  | ||||
| type JsonObj map[string]any | ||||
|  | ||||
| func (j JsonObj) MarshalToDB(v JsonObj) (string, error) { | ||||
| 	mrsh, err := json.Marshal(v) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
| 	return string(mrsh), nil | ||||
| } | ||||
|  | ||||
| func (j JsonObj) UnmarshalToModel(v string) (JsonObj, error) { | ||||
| 	var mrsh JsonObj | ||||
| 	if err := json.Unmarshal([]byte(v), &mrsh); err != nil { | ||||
| 		return JsonObj{}, err | ||||
| 	} | ||||
| 	return mrsh, nil | ||||
| } | ||||
|  | ||||
| type JsonArr []any | ||||
|  | ||||
| func (j JsonArr) MarshalToDB(v JsonArr) (string, error) { | ||||
| 	mrsh, err := json.Marshal(v) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
| 	return string(mrsh), nil | ||||
| } | ||||
|  | ||||
| func (j JsonArr) UnmarshalToModel(v string) (JsonArr, error) { | ||||
| 	var mrsh JsonArr | ||||
| 	if err := json.Unmarshal([]byte(v), &mrsh); err != nil { | ||||
| 		return JsonArr{}, err | ||||
| 	} | ||||
| 	return mrsh, nil | ||||
| } | ||||
|  | ||||
| type AutoJson[T any] struct { | ||||
| 	Value T | ||||
| } | ||||
|  | ||||
| func (j AutoJson[T]) MarshalToDB(v AutoJson[T]) (string, error) { | ||||
| 	mrsh, err := json.Marshal(v.Value) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
| 	return string(mrsh), nil | ||||
| } | ||||
|  | ||||
| func (j AutoJson[T]) UnmarshalToModel(v string) (AutoJson[T], error) { | ||||
| 	mrsh := *new(T) | ||||
| 	if err := json.Unmarshal([]byte(v), &mrsh); err != nil { | ||||
| 		return AutoJson[T]{}, err | ||||
| 	} | ||||
| 	return AutoJson[T]{Value: mrsh}, nil | ||||
| } | ||||
|   | ||||
							
								
								
									
										48
									
								
								sq/list.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										48
									
								
								sq/list.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,48 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| ) | ||||
|  | ||||
| func Iterate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int, consumer func(ctx context.Context, v TData) error) (int, error) { | ||||
| 	if filter == nil { | ||||
| 		filter = NewEmptyPaginateFilter() | ||||
| 	} | ||||
|  | ||||
| 	prepParams := PP{} | ||||
|  | ||||
| 	sortOrder := filter.Sort() | ||||
| 	sortCond := "" | ||||
| 	if len(sortOrder) > 0 { | ||||
| 		sortCond = "ORDER BY " | ||||
| 		for i, v := range sortOrder { | ||||
| 			if i > 0 { | ||||
| 				sortCond += ", " | ||||
| 			} | ||||
| 			sortCond += v.Field + " " + string(v.Direction) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	pageCond := "" | ||||
| 	if limit != nil { | ||||
| 		pageCond += fmt.Sprintf("LIMIT :%s OFFSET :%s", prepParams.Add(*limit+1), prepParams.Add(*limit*(page-1))) | ||||
| 	} | ||||
|  | ||||
| 	filterCond, joinCond, joinTables := filter.SQL(prepParams) | ||||
|  | ||||
| 	selectCond := table + ".*" | ||||
| 	for _, v := range joinTables { | ||||
| 		selectCond += ", " + v + ".*" | ||||
| 	} | ||||
|  | ||||
| 	sqlQueryData := "SELECT " + selectCond + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " ) " + sortCond + " " + pageCond | ||||
|  | ||||
| 	rows, err := q.Query(ctx, sqlQueryData, prepParams) | ||||
| 	if err != nil { | ||||
| 		return 0, exerr.Wrap(err, "failed to list paginated entries from DB").Str("table", table).Any("filter", filter).Int("page", page).Any("limit", limit).Build() | ||||
| 	} | ||||
|  | ||||
| 	return IterateAll[TData](ctx, q, rows, scanMode, scanSec, true, consumer) | ||||
| } | ||||
							
								
								
									
										169
									
								
								sq/listener.go
									
									
									
									
									
								
							
							
						
						
									
										169
									
								
								sq/listener.go
									
									
									
									
									
								
							| @@ -17,3 +17,172 @@ type Listener interface { | ||||
| 	PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP) | ||||
| 	PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP) | ||||
| } | ||||
|  | ||||
// genListener is a generic Listener implementation backed by optional
// callbacks: every hook that has a nil callback behaves as a no-op.
// It is the shared backing type for all NewPre*/NewPost* constructors below.
type genListener struct {
	prePing        func(ctx context.Context) error
	preTxBegin     func(ctx context.Context, txid uint16) error
	preTxCommit    func(txid uint16) error
	preTxRollback  func(txid uint16) error
	preQuery       func(ctx context.Context, txID *uint16, sql *string, params *PP) error
	preExec        func(ctx context.Context, txID *uint16, sql *string, params *PP) error
	postPing       func(result error)
	postTxBegin    func(txid uint16, result error)
	postTxCommit   func(txid uint16, result error)
	postTxRollback func(txid uint16, result error)
	postQuery      func(txID *uint16, sqlOriginal string, sqlReal string, params PP)
	postExec       func(txID *uint16, sqlOriginal string, sqlReal string, params PP)
}
|  | ||||
| func (g genListener) PrePing(ctx context.Context) error { | ||||
| 	if g.prePing != nil { | ||||
| 		return g.prePing(ctx) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreTxBegin(ctx context.Context, txid uint16) error { | ||||
| 	if g.preTxBegin != nil { | ||||
| 		return g.preTxBegin(ctx, txid) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreTxCommit(txid uint16) error { | ||||
| 	if g.preTxCommit != nil { | ||||
| 		return g.preTxCommit(txid) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreTxRollback(txid uint16) error { | ||||
| 	if g.preTxRollback != nil { | ||||
| 		return g.preTxRollback(txid) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP) error { | ||||
| 	if g.preQuery != nil { | ||||
| 		return g.preQuery(ctx, txID, sql, params) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreExec(ctx context.Context, txID *uint16, sql *string, params *PP) error { | ||||
| 	if g.preExec != nil { | ||||
| 		return g.preExec(ctx, txID, sql, params) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostPing(result error) { | ||||
| 	if g.postPing != nil { | ||||
| 		g.postPing(result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostTxBegin(txid uint16, result error) { | ||||
| 	if g.postTxBegin != nil { | ||||
| 		g.postTxBegin(txid, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostTxCommit(txid uint16, result error) { | ||||
| 	if g.postTxCommit != nil { | ||||
| 		g.postTxCommit(txid, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostTxRollback(txid uint16, result error) { | ||||
| 	if g.postTxRollback != nil { | ||||
| 		g.postTxRollback(txid, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP) { | ||||
| 	if g.postQuery != nil { | ||||
| 		g.postQuery(txID, sqlOriginal, sqlReal, params) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP) { | ||||
| 	if g.postExec != nil { | ||||
| 		g.postExec(txID, sqlOriginal, sqlReal, params) | ||||
| 	} | ||||
| } | ||||
|  | ||||
// NewPrePingListener returns a Listener whose only non-no-op hook is PrePing.
func NewPrePingListener(f func(ctx context.Context) error) Listener {
	return genListener{prePing: f}
}

// NewPreTxBeginListener returns a Listener whose only non-no-op hook is PreTxBegin.
func NewPreTxBeginListener(f func(ctx context.Context, txid uint16) error) Listener {
	return genListener{preTxBegin: f}
}

// NewPreTxCommitListener returns a Listener whose only non-no-op hook is PreTxCommit.
func NewPreTxCommitListener(f func(txid uint16) error) Listener {
	return genListener{preTxCommit: f}
}

// NewPreTxRollbackListener returns a Listener whose only non-no-op hook is PreTxRollback.
func NewPreTxRollbackListener(f func(txid uint16) error) Listener {
	return genListener{preTxRollback: f}
}

// NewPreQueryListener returns a Listener whose only non-no-op hook is PreQuery.
func NewPreQueryListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP) error) Listener {
	return genListener{preQuery: f}
}

// NewPreExecListener returns a Listener whose only non-no-op hook is PreExec.
func NewPreExecListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP) error) Listener {
	return genListener{preExec: f}
}

// NewPreListener returns a Listener whose PreQuery and PreExec hooks both
// forward to f, passing "QUERY" or "EXEC" respectively as cmdtype.
func NewPreListener(f func(ctx context.Context, cmdtype string, txID *uint16, sql *string, params *PP) error) Listener {
	return genListener{
		preExec: func(ctx context.Context, txID *uint16, sql *string, params *PP) error {
			return f(ctx, "EXEC", txID, sql, params)
		},
		preQuery: func(ctx context.Context, txID *uint16, sql *string, params *PP) error {
			return f(ctx, "QUERY", txID, sql, params)
		},
	}
}
|  | ||||
// NewPostPingListener returns a Listener whose only non-no-op hook is PostPing.
func NewPostPingListener(f func(result error)) Listener {
	return genListener{postPing: f}
}

// NewPostTxBeginListener returns a Listener whose only non-no-op hook is PostTxBegin.
func NewPostTxBeginListener(f func(txid uint16, result error)) Listener {
	return genListener{postTxBegin: f}
}

// NewPostTxCommitListener returns a Listener whose only non-no-op hook is PostTxCommit.
func NewPostTxCommitListener(f func(txid uint16, result error)) Listener {
	return genListener{postTxCommit: f}
}

// NewPostTxRollbackListener returns a Listener whose only non-no-op hook is PostTxRollback.
func NewPostTxRollbackListener(f func(txid uint16, result error)) Listener {
	return genListener{postTxRollback: f}
}

// NewPostQueryListener returns a Listener whose only non-no-op hook is PostQuery.
func NewPostQueryListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener {
	return genListener{postQuery: f}
}

// NewPostExecListener returns a Listener whose only non-no-op hook is PostExec.
func NewPostExecListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener {
	return genListener{postExec: f}
}

// NewPostListener returns a Listener whose PostQuery and PostExec hooks both
// forward to f, passing "QUERY" or "EXEC" respectively as cmdtype.
func NewPostListener(f func(cmdtype string, txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener {
	return genListener{
		postExec: func(txID *uint16, sqlOriginal string, sqlReal string, params PP) {
			f("EXEC", txID, sqlOriginal, sqlReal, params)
		},
		postQuery: func(txID *uint16, sqlOriginal string, sqlReal string, params PP) {
			f("QUERY", txID, sqlOriginal, sqlReal, params)
		},
	}
}
|   | ||||
							
								
								
									
										15
									
								
								sq/main_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										15
									
								
								sq/main_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,15 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"os" | ||||
| 	"testing" | ||||
| ) | ||||
|  | ||||
// TestMain initializes the exerr error package once (if no other TestMain /
// init already did) before running the package's tests, so exerr.Wrap/New
// calls in the code under test work without log noise.
func TestMain(m *testing.M) {
	if !exerr.Initialized() {
		exerr.Init(exerr.ErrorPackageConfigInit{ZeroLogErrTraces: langext.PFalse, ZeroLogAllTraces: langext.PFalse})
	}
	os.Exit(m.Run())
}
							
								
								
									
										123
									
								
								sq/paginate.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										123
									
								
								sq/paginate.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,123 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | ||||
| ) | ||||
|  | ||||
| func Paginate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||
| 	if filter == nil { | ||||
| 		filter = NewEmptyPaginateFilter() | ||||
| 	} | ||||
|  | ||||
| 	prepParams := PP{} | ||||
|  | ||||
| 	sortOrder := filter.Sort() | ||||
| 	sortCond := "" | ||||
| 	if len(sortOrder) > 0 { | ||||
| 		sortCond = "ORDER BY " | ||||
| 		for i, v := range sortOrder { | ||||
| 			if i > 0 { | ||||
| 				sortCond += ", " | ||||
| 			} | ||||
| 			sortCond += v.Field + " " + string(v.Direction) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	pageCond := "" | ||||
| 	if limit != nil { | ||||
| 		pageCond += fmt.Sprintf("LIMIT :%s OFFSET :%s", prepParams.Add(*limit+1), prepParams.Add(*limit*(page-1))) | ||||
| 	} | ||||
|  | ||||
| 	filterCond, joinCond, joinTables := filter.SQL(prepParams) | ||||
|  | ||||
| 	selectCond := table + ".*" | ||||
| 	for _, v := range joinTables { | ||||
| 		selectCond += ", " + v + ".*" | ||||
| 	} | ||||
|  | ||||
| 	sqlQueryData := "SELECT " + selectCond + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " ) " + sortCond + " " + pageCond | ||||
| 	sqlQueryCount := "SELECT " + "COUNT(*)" + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " ) " | ||||
|  | ||||
| 	rows, err := q.Query(ctx, sqlQueryData, prepParams) | ||||
| 	if err != nil { | ||||
| 		return nil, pag.Pagination{}, exerr.Wrap(err, "failed to list paginated entries from DB").Str("table", table).Any("filter", filter).Int("page", page).Any("limit", limit).Build() | ||||
| 	} | ||||
|  | ||||
| 	entities, err := ScanAll[TData](ctx, q, rows, scanMode, scanSec, true) | ||||
| 	if err != nil { | ||||
| 		return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode paginated entries from DB").Str("table", table).Int("page", page).Any("limit", limit).Str("scanMode", string(scanMode)).Str("scanSec", string(scanSec)).Build() | ||||
| 	} | ||||
|  | ||||
| 	if page == 1 && (limit == nil || len(entities) <= *limit) { | ||||
| 		return entities, pag.Pagination{ | ||||
| 			Page:             1, | ||||
| 			Limit:            langext.Coalesce(limit, len(entities)), | ||||
| 			TotalPages:       1, | ||||
| 			TotalItems:       len(entities), | ||||
| 			CurrentPageCount: 1, | ||||
| 		}, nil | ||||
| 	} else { | ||||
|  | ||||
| 		countRows, err := q.Query(ctx, sqlQueryCount, prepParams) | ||||
| 		if err != nil { | ||||
| 			return nil, pag.Pagination{}, exerr.Wrap(err, "failed to query total-count of paginated entries from DB").Str("table", table).Build() | ||||
| 		} | ||||
|  | ||||
| 		if !countRows.Next() { | ||||
| 			return nil, pag.Pagination{}, exerr.New(exerr.TypeSQLDecode, "SQL COUNT(*) query returned no rows").Str("table", table).Any("filter", filter).Build() | ||||
| 		} | ||||
|  | ||||
| 		var countRes int | ||||
| 		err = countRows.Scan(&countRes) | ||||
| 		if err != nil { | ||||
| 			return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode total-count of paginated entries from DB").Str("table", table).Build() | ||||
| 		} | ||||
|  | ||||
| 		if len(entities) > *limit { | ||||
| 			entities = entities[:*limit] | ||||
| 		} | ||||
|  | ||||
| 		paginationObj := pag.Pagination{ | ||||
| 			Page:             page, | ||||
| 			Limit:            langext.Coalesce(limit, countRes), | ||||
| 			TotalPages:       pag.CalcPaginationTotalPages(countRes, langext.Coalesce(limit, countRes)), | ||||
| 			TotalItems:       countRes, | ||||
| 			CurrentPageCount: len(entities), | ||||
| 		} | ||||
|  | ||||
| 		return entities, paginationObj, nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func Count(ctx context.Context, q Queryable, table string, filter PaginateFilter) (int, error) { | ||||
| 	if filter == nil { | ||||
| 		filter = NewEmptyPaginateFilter() | ||||
| 	} | ||||
|  | ||||
| 	prepParams := PP{} | ||||
|  | ||||
| 	filterCond, joinCond, _ := filter.SQL(prepParams) | ||||
|  | ||||
| 	sqlQueryCount := "SELECT " + "COUNT(*)" + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " )" | ||||
|  | ||||
| 	countRows, err := q.Query(ctx, sqlQueryCount, prepParams) | ||||
| 	if err != nil { | ||||
| 		return 0, exerr.Wrap(err, "failed to query count of entries from DB").Str("table", table).Build() | ||||
| 	} | ||||
|  | ||||
| 	if !countRows.Next() { | ||||
| 		return 0, exerr.New(exerr.TypeSQLDecode, "SQL COUNT(*) query returned no rows").Str("table", table).Any("filter", filter).Build() | ||||
| 	} | ||||
|  | ||||
| 	var countRes int | ||||
| 	err = countRows.Scan(&countRes) | ||||
| 	if err != nil { | ||||
| 		return 0, exerr.Wrap(err, "failed to decode count of entries from DB").Str("table", table).Build() | ||||
| 	} | ||||
|  | ||||
| 	return countRes, nil | ||||
| } | ||||
| @@ -20,6 +20,12 @@ func (pp *PP) Add(v any) string { | ||||
| 	return id | ||||
| } | ||||
|  | ||||
| func (pp *PP) AddAll(other PP) { | ||||
| 	for id, v := range other { | ||||
| 		(*pp)[id] = v | ||||
| 	} | ||||
| } | ||||
|  | ||||
// PPID returns a fresh random prepared-parameter identifier
// ("p_" followed by 8 base62 characters).
func PPID() string {
	return "p_" + langext.RandBase62(8)
}
|   | ||||
							
								
								
									
										251
									
								
								sq/scanner.go
									
									
									
									
									
								
							
							
						
						
									
										251
									
								
								sq/scanner.go
									
									
									
									
									
								
							| @@ -6,8 +6,9 @@ import ( | ||||
| 	"errors" | ||||
| 	"fmt" | ||||
| 	"github.com/jmoiron/sqlx" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"reflect" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| type StructScanMode string | ||||
| @@ -26,43 +27,11 @@ const ( | ||||
|  | ||||
| func InsertSingle[TData any](ctx context.Context, q Queryable, tableName string, v TData) (sql.Result, error) { | ||||
|  | ||||
| 	rval := reflect.ValueOf(v) | ||||
| 	rtyp := rval.Type() | ||||
|  | ||||
| 	columns := make([]string, 0) | ||||
| 	params := make([]string, 0) | ||||
| 	pp := PP{} | ||||
|  | ||||
| 	for i := 0; i < rtyp.NumField(); i++ { | ||||
|  | ||||
| 		rsfield := rtyp.Field(i) | ||||
| 		rvfield := rval.Field(i) | ||||
|  | ||||
| 		if !rsfield.IsExported() { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		columnName := rsfield.Tag.Get("db") | ||||
| 		if columnName == "" || columnName == "-" { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		paramkey := fmt.Sprintf("_%s", columnName) | ||||
|  | ||||
| 		columns = append(columns, "\""+columnName+"\"") | ||||
| 		params = append(params, ":"+paramkey) | ||||
|  | ||||
| 		val, err := convertValueToDB(q, rvfield.Interface()) | ||||
| 	sqlstr, pp, err := BuildInsertStatement(q, tableName, v) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 		pp[paramkey] = val | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	sqlstr := fmt.Sprintf("INSERT"+" INTO \"%s\" (%s) VALUES (%s)", tableName, strings.Join(columns, ", "), strings.Join(params, ", ")) | ||||
|  | ||||
| 	sqlr, err := q.Exec(ctx, sqlstr, pp) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| @@ -71,6 +40,127 @@ func InsertSingle[TData any](ctx context.Context, q Queryable, tableName string, | ||||
| 	return sqlr, nil | ||||
| } | ||||
|  | ||||
| func InsertAndQuerySingle[TData any](ctx context.Context, q Queryable, tableName string, v TData, idColumn string, mode StructScanMode, sec StructScanSafety) (TData, error) { | ||||
|  | ||||
| 	rval := reflect.ValueOf(v) | ||||
|  | ||||
| 	idRVal := fieldByTag(rval, "db", idColumn) | ||||
| 	if !idRVal.IsValid() || idRVal.IsZero() { | ||||
| 		return *new(TData), fmt.Errorf("failed to find idColumn '%s' in %T", idColumn, v) | ||||
| 	} | ||||
|  | ||||
| 	idValue, err := convertValueToDB(q, idRVal.Interface()) | ||||
| 	if err != nil { | ||||
| 		return *new(TData), err | ||||
| 	} | ||||
|  | ||||
| 	_, err = InsertSingle[TData](ctx, q, tableName, v) | ||||
| 	if err != nil { | ||||
| 		return *new(TData), err | ||||
| 	} | ||||
|  | ||||
| 	pp := PP{} | ||||
|  | ||||
| 	//goland:noinspection ALL | ||||
| 	sqlstr := fmt.Sprintf("SELECT * FROM %s WHERE %s = :%s", tableName, idColumn, pp.Add(idValue)) | ||||
|  | ||||
| 	return QuerySingle[TData](ctx, q, sqlstr, pp, mode, sec) | ||||
| } | ||||
|  | ||||
| func fieldByTag(rval reflect.Value, tagkey string, tagval string) reflect.Value { | ||||
| 	rtyp := rval.Type() | ||||
| 	for i := 0; i < rtyp.NumField(); i++ { | ||||
| 		rsfield := rtyp.Field(i) | ||||
|  | ||||
| 		if !rsfield.IsExported() { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		if rsfield.Tag.Get(tagkey) == tagval { | ||||
| 			return rval.Field(i) | ||||
| 		} | ||||
| 	} | ||||
| 	panic(fmt.Sprintf("tag %s = '%s' not found in %s", tagkey, tagval, rtyp.Name())) | ||||
| } | ||||
|  | ||||
| func InsertMultiple[TData any](ctx context.Context, q Queryable, tableName string, vArr []TData, maxBatch int) ([]sql.Result, error) { | ||||
|  | ||||
| 	if len(vArr) == 0 { | ||||
| 		return make([]sql.Result, 0), nil | ||||
| 	} | ||||
|  | ||||
| 	chunks := langext.ArrChunk(vArr, maxBatch) | ||||
|  | ||||
| 	sqlstrArr := make([]string, 0) | ||||
| 	ppArr := make([]PP, 0) | ||||
|  | ||||
| 	for _, chunk := range chunks { | ||||
|  | ||||
| 		sqlstr, pp, err := BuildInsertMultipleStatement(q, tableName, chunk) | ||||
| 		if err != nil { | ||||
| 			return nil, exerr.Wrap(err, "").Build() | ||||
| 		} | ||||
|  | ||||
| 		sqlstrArr = append(sqlstrArr, sqlstr) | ||||
| 		ppArr = append(ppArr, pp) | ||||
| 	} | ||||
|  | ||||
| 	res := make([]sql.Result, 0, len(sqlstrArr)) | ||||
|  | ||||
| 	for i := 0; i < len(sqlstrArr); i++ { | ||||
| 		sqlr, err := q.Exec(ctx, sqlstrArr[i], ppArr[i]) | ||||
| 		if err != nil { | ||||
| 			return nil, err | ||||
| 		} | ||||
|  | ||||
| 		res = append(res, sqlr) | ||||
| 	} | ||||
|  | ||||
| 	return res, nil | ||||
| } | ||||
|  | ||||
| func UpdateSingle[TData any](ctx context.Context, q Queryable, tableName string, v TData, idColumn string) (sql.Result, error) { | ||||
|  | ||||
| 	sqlstr, pp, err := BuildUpdateStatement(q, tableName, v, idColumn) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	sqlr, err := q.Exec(ctx, sqlstr, pp) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	return sqlr, nil | ||||
| } | ||||
|  | ||||
| func UpdateAndQuerySingle[TData any](ctx context.Context, q Queryable, tableName string, v TData, idColumn string, mode StructScanMode, sec StructScanSafety) (TData, error) { | ||||
|  | ||||
| 	rval := reflect.ValueOf(v) | ||||
|  | ||||
| 	idRVal := fieldByTag(rval, "db", idColumn) | ||||
| 	if !idRVal.IsValid() || idRVal.IsZero() { | ||||
| 		return *new(TData), fmt.Errorf("failed to find idColumn '%s' in %T", idColumn, v) | ||||
| 	} | ||||
|  | ||||
| 	idValue, err := convertValueToDB(q, idRVal.Interface()) | ||||
| 	if err != nil { | ||||
| 		return *new(TData), err | ||||
| 	} | ||||
|  | ||||
| 	_, err = UpdateSingle[TData](ctx, q, tableName, v, idColumn) | ||||
| 	if err != nil { | ||||
| 		return *new(TData), err | ||||
| 	} | ||||
|  | ||||
| 	pp := PP{} | ||||
|  | ||||
| 	//goland:noinspection ALL | ||||
| 	sqlstr := fmt.Sprintf("SELECT * FROM %s WHERE %s = :%s", tableName, idColumn, pp.Add(idValue)) | ||||
|  | ||||
| 	return QuerySingle[TData](ctx, q, sqlstr, pp, mode, sec) | ||||
| } | ||||
|  | ||||
| func QuerySingle[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) (TData, error) { | ||||
| 	rows, err := q.Query(ctx, sql, pp) | ||||
| 	if err != nil { | ||||
| @@ -85,6 +175,23 @@ func QuerySingle[TData any](ctx context.Context, q Queryable, sql string, pp PP, | ||||
| 	return data, nil | ||||
| } | ||||
|  | ||||
| func QuerySingleOpt[TData any](ctx context.Context, q Queryable, sqlstr string, pp PP, mode StructScanMode, sec StructScanSafety) (*TData, error) { | ||||
| 	rows, err := q.Query(ctx, sqlstr, pp) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	data, err := ScanSingle[TData](ctx, q, rows, mode, sec, true) | ||||
| 	if errors.Is(err, sql.ErrNoRows) { | ||||
| 		return nil, nil | ||||
| 	} | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	return &data, nil | ||||
| } | ||||
|  | ||||
| func QueryAll[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) ([]TData, error) { | ||||
| 	rows, err := q.Query(ctx, sql, pp) | ||||
| 	if err != nil { | ||||
| @@ -226,3 +333,79 @@ func ScanAll[TData any](ctx context.Context, q Queryable, rows *sqlx.Rows, mode | ||||
| 	} | ||||
| 	return res, nil | ||||
| } | ||||
|  | ||||
| func IterateAll[TData any](ctx context.Context, q Queryable, rows *sqlx.Rows, mode StructScanMode, sec StructScanSafety, close bool, consumer func(ctx context.Context, v TData) error) (int, error) { | ||||
| 	var strscan *StructScanner | ||||
|  | ||||
| 	if sec == Safe { | ||||
| 		strscan = NewStructScanner(rows, false) | ||||
| 		var data TData | ||||
| 		err := strscan.Start(&data) | ||||
| 		if err != nil { | ||||
| 			return 0, err | ||||
| 		} | ||||
| 	} else if sec == Unsafe { | ||||
| 		strscan = NewStructScanner(rows, true) | ||||
| 		var data TData | ||||
| 		err := strscan.Start(&data) | ||||
| 		if err != nil { | ||||
| 			return 0, err | ||||
| 		} | ||||
| 	} else { | ||||
| 		return 0, errors.New("unknown value for <sec>") | ||||
| 	} | ||||
|  | ||||
| 	rcount := 0 | ||||
|  | ||||
| 	for rows.Next() { | ||||
|  | ||||
| 		if err := ctx.Err(); err != nil { | ||||
| 			return rcount, err | ||||
| 		} | ||||
|  | ||||
| 		if mode == SModeFast { | ||||
| 			var data TData | ||||
| 			err := strscan.StructScanBase(&data) | ||||
| 			if err != nil { | ||||
| 				return rcount, err | ||||
| 			} | ||||
|  | ||||
| 			err = consumer(ctx, data) | ||||
| 			if err != nil { | ||||
| 				return rcount, exerr.Wrap(err, "").Build() | ||||
| 			} | ||||
|  | ||||
| 			rcount++ | ||||
|  | ||||
| 		} else if mode == SModeExtended { | ||||
| 			var data TData | ||||
| 			err := strscan.StructScanExt(q, &data) | ||||
| 			if err != nil { | ||||
| 				return rcount, err | ||||
| 			} | ||||
|  | ||||
| 			err = consumer(ctx, data) | ||||
| 			if err != nil { | ||||
| 				return rcount, exerr.Wrap(err, "").Build() | ||||
| 			} | ||||
|  | ||||
| 			rcount++ | ||||
|  | ||||
| 		} else { | ||||
| 			return rcount, errors.New("unknown value for <mode>") | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if close { | ||||
| 		err := strscan.rows.Close() | ||||
| 		if err != nil { | ||||
| 			return rcount, err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err := rows.Err(); err != nil { | ||||
| 		return rcount, err | ||||
| 	} | ||||
|  | ||||
| 	return rcount, nil | ||||
| } | ||||
|   | ||||
							
								
								
									
										234
									
								
								sq/scanner_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										234
									
								
								sq/scanner_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,234 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"database/sql" | ||||
| 	"fmt" | ||||
| 	"github.com/glebarez/go-sqlite" | ||||
| 	"github.com/jmoiron/sqlx" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||
| 	"path/filepath" | ||||
| 	"testing" | ||||
| ) | ||||
|  | ||||
| func TestInsertSingle(t *testing.T) { | ||||
|  | ||||
| 	type request struct { | ||||
| 		ID        string  `json:"id"        db:"id"` | ||||
| 		Timestamp int     `json:"timestamp" db:"timestamp"` | ||||
| 		StrVal    string  `json:"strVal"    db:"str_val"` | ||||
| 		FloatVal  float64 `json:"floatVal"    db:"float_val"` | ||||
| 		Dummy     bool    `json:"dummyBool" db:"dummy_bool"` | ||||
| 		JsonVal   JsonObj `json:"jsonVal" db:"json_val"` | ||||
| 	} | ||||
|  | ||||
| 	if !langext.InArray("sqlite3", sql.Drivers()) { | ||||
| 		sqlite.RegisterAsSQLITE3() | ||||
| 	} | ||||
|  | ||||
| 	ctx := context.Background() | ||||
|  | ||||
| 	dbdir := t.TempDir() | ||||
| 	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3") | ||||
|  | ||||
| 	url := fmt.Sprintf("file:%s?_pragma=journal_mode(%s)&_pragma=timeout(%d)&_pragma=foreign_keys(%s)&_pragma=busy_timeout(%d)", dbfile1, "DELETE", 1000, "true", 1000) | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||
|  | ||||
| 	_, err := db.Exec(ctx, ` | ||||
| 		CREATE TABLE requests (  | ||||
| 		    id TEXT NOT NULL,  | ||||
| 		    timestamp INTEGER NOT NULL,  | ||||
| 		    str_val TEXT NOT NULL,  | ||||
| 		    float_val REAL NOT NULL,  | ||||
| 		    dummy_bool INTEGER NOT NULL  CHECK(dummy_bool IN (0, 1)),  | ||||
| 		    json_val TEXT NOT NULL,  | ||||
| 		    PRIMARY KEY (id)  | ||||
| 		) STRICT | ||||
| `, PP{}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	_, err = InsertSingle(ctx, db, "requests", request{ | ||||
| 		ID:        "9927", | ||||
| 		Timestamp: 12321, | ||||
| 		StrVal:    "hello world", | ||||
| 		Dummy:     true, | ||||
| 		FloatVal:  3.14159, | ||||
| 		JsonVal: JsonObj{ | ||||
| 			"firs":   1, | ||||
| 			"second": true, | ||||
| 		}, | ||||
| 	}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
| } | ||||
|  | ||||
| func TestUpdateSingle(t *testing.T) { | ||||
|  | ||||
| 	type request struct { | ||||
| 		ID        string  `json:"id"        db:"id"` | ||||
| 		Timestamp int     `json:"timestamp" db:"timestamp"` | ||||
| 		StrVal    string  `json:"strVal"    db:"str_val"` | ||||
| 		FloatVal  float64 `json:"floatVal"    db:"float_val"` | ||||
| 		Dummy     bool    `json:"dummyBool" db:"dummy_bool"` | ||||
| 		JsonVal   JsonObj `json:"jsonVal" db:"json_val"` | ||||
| 	} | ||||
|  | ||||
| 	if !langext.InArray("sqlite3", sql.Drivers()) { | ||||
| 		sqlite.RegisterAsSQLITE3() | ||||
| 	} | ||||
|  | ||||
| 	ctx := context.Background() | ||||
|  | ||||
| 	dbdir := t.TempDir() | ||||
| 	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3") | ||||
|  | ||||
| 	url := fmt.Sprintf("file:%s?_pragma=journal_mode(%s)&_pragma=timeout(%d)&_pragma=foreign_keys(%s)&_pragma=busy_timeout(%d)", dbfile1, "DELETE", 1000, "true", 1000) | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||
|  | ||||
| 	_, err := db.Exec(ctx, ` | ||||
| 		CREATE TABLE requests (  | ||||
| 		    id TEXT NOT NULL,  | ||||
| 		    timestamp INTEGER NOT NULL,  | ||||
| 		    str_val TEXT NOT NULL,  | ||||
| 		    float_val REAL NOT NULL,  | ||||
| 		    dummy_bool INTEGER NOT NULL  CHECK(dummy_bool IN (0, 1)),  | ||||
| 		    json_val TEXT NOT NULL,  | ||||
| 		    PRIMARY KEY (id)  | ||||
| 		) STRICT | ||||
| `, PP{}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	_, err = InsertSingle(ctx, db, "requests", request{ | ||||
| 		ID:        "9927", | ||||
| 		Timestamp: 12321, | ||||
| 		StrVal:    "hello world", | ||||
| 		Dummy:     true, | ||||
| 		FloatVal:  3.14159, | ||||
| 		JsonVal: JsonObj{ | ||||
| 			"first":  1, | ||||
| 			"second": true, | ||||
| 		}, | ||||
| 	}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	v, err := QuerySingle[request](ctx, db, "SELECT * FROM requests WHERE id = '9927' LIMIT 1", PP{}, SModeExtended, Safe) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	tst.AssertEqual(t, v.Timestamp, 12321) | ||||
| 	tst.AssertEqual(t, v.StrVal, "hello world") | ||||
| 	tst.AssertEqual(t, v.Dummy, true) | ||||
| 	tst.AssertEqual(t, v.FloatVal, 3.14159) | ||||
| 	tst.AssertStrRepEqual(t, v.JsonVal["first"], 1) | ||||
| 	tst.AssertStrRepEqual(t, v.JsonVal["second"], true) | ||||
|  | ||||
| 	_, err = UpdateSingle(ctx, db, "requests", request{ | ||||
| 		ID:        "9927", | ||||
| 		Timestamp: 9999, | ||||
| 		StrVal:    "9999 hello world", | ||||
| 		Dummy:     false, | ||||
| 		FloatVal:  123.222, | ||||
| 		JsonVal: JsonObj{ | ||||
| 			"first":  2, | ||||
| 			"second": false, | ||||
| 		}, | ||||
| 	}, "id") | ||||
|  | ||||
| 	v, err = QuerySingle[request](ctx, db, "SELECT * FROM requests WHERE id = '9927' LIMIT 1", PP{}, SModeExtended, Safe) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	tst.AssertEqual(t, v.Timestamp, 9999) | ||||
| 	tst.AssertEqual(t, v.StrVal, "9999 hello world") | ||||
| 	tst.AssertEqual(t, v.Dummy, false) | ||||
| 	tst.AssertEqual(t, v.FloatVal, 123.222) | ||||
| 	tst.AssertStrRepEqual(t, v.JsonVal["first"], 2) | ||||
| 	tst.AssertStrRepEqual(t, v.JsonVal["second"], false) | ||||
| } | ||||
|  | ||||
| func TestInsertMultiple(t *testing.T) { | ||||
|  | ||||
| 	type request struct { | ||||
| 		ID        string  `json:"id"        db:"id"` | ||||
| 		Timestamp int     `json:"timestamp" db:"timestamp"` | ||||
| 		StrVal    string  `json:"strVal"    db:"str_val"` | ||||
| 		FloatVal  float64 `json:"floatVal"    db:"float_val"` | ||||
| 		Dummy     bool    `json:"dummyBool" db:"dummy_bool"` | ||||
| 		JsonVal   JsonObj `json:"jsonVal" db:"json_val"` | ||||
| 	} | ||||
|  | ||||
| 	if !langext.InArray("sqlite3", sql.Drivers()) { | ||||
| 		sqlite.RegisterAsSQLITE3() | ||||
| 	} | ||||
|  | ||||
| 	ctx := context.Background() | ||||
|  | ||||
| 	dbdir := t.TempDir() | ||||
| 	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3") | ||||
|  | ||||
| 	url := fmt.Sprintf("file:%s?_pragma=journal_mode(%s)&_pragma=timeout(%d)&_pragma=foreign_keys(%s)&_pragma=busy_timeout(%d)", dbfile1, "DELETE", 1000, "true", 1000) | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||
|  | ||||
| 	_, err := db.Exec(ctx, ` | ||||
| 		CREATE TABLE requests (  | ||||
| 		    id TEXT NOT NULL,  | ||||
| 		    timestamp INTEGER NOT NULL,  | ||||
| 		    str_val TEXT NOT NULL,  | ||||
| 		    float_val REAL NOT NULL,  | ||||
| 		    dummy_bool INTEGER NOT NULL  CHECK(dummy_bool IN (0, 1)),  | ||||
| 		    json_val TEXT NOT NULL,  | ||||
| 		    PRIMARY KEY (id)  | ||||
| 		) STRICT | ||||
| `, PP{}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	_, err = InsertMultiple(ctx, db, "requests", []request{ | ||||
| 		{ | ||||
| 			ID:        "1", | ||||
| 			Timestamp: 1000, | ||||
| 			StrVal:    "one", | ||||
| 			Dummy:     true, | ||||
| 			FloatVal:  0.1, | ||||
| 			JsonVal: JsonObj{ | ||||
| 				"arr": []int{0}, | ||||
| 			}, | ||||
| 		}, | ||||
| 		{ | ||||
| 			ID:        "2", | ||||
| 			Timestamp: 2000, | ||||
| 			StrVal:    "two", | ||||
| 			Dummy:     true, | ||||
| 			FloatVal:  0.2, | ||||
| 			JsonVal: JsonObj{ | ||||
| 				"arr": []int{0, 0}, | ||||
| 			}, | ||||
| 		}, | ||||
| 		{ | ||||
| 			ID:        "3", | ||||
| 			Timestamp: 3000, | ||||
| 			StrVal:    "three", | ||||
| 			Dummy:     true, | ||||
| 			FloatVal:  0.3, | ||||
| 			JsonVal: JsonObj{ | ||||
| 				"arr": []int{0, 0, 0}, | ||||
| 			}, | ||||
| 		}, | ||||
| 	}, -1) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	_, err = QuerySingle[request](ctx, db, "SELECT * FROM requests WHERE id = '1' LIMIT 1", PP{}, SModeExtended, Safe) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	_, err = QuerySingle[request](ctx, db, "SELECT * FROM requests WHERE id = '2' LIMIT 1", PP{}, SModeExtended, Safe) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	_, err = QuerySingle[request](ctx, db, "SELECT * FROM requests WHERE id = '3' LIMIT 1", PP{}, SModeExtended, Safe) | ||||
| 	tst.AssertNoErr(t, err) | ||||
| } | ||||
| @@ -36,8 +36,7 @@ func TestTypeConverter1(t *testing.T) { | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db.RegisterDefaultConverter() | ||||
| 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||
|  | ||||
| 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
| @@ -71,8 +70,7 @@ func TestTypeConverter2(t *testing.T) { | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db.RegisterDefaultConverter() | ||||
| 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||
|  | ||||
| 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
| @@ -93,3 +91,61 @@ func TestTypeConverter2(t *testing.T) { | ||||
| 	tst.AssertEqual(t, "002", r.ID) | ||||
| 	tst.AssertEqual(t, t0.UnixNano(), r.Timestamp.UnixNano()) | ||||
| } | ||||
|  | ||||
| func TestTypeConverter3(t *testing.T) { | ||||
|  | ||||
| 	if !langext.InArray("sqlite3", sql.Drivers()) { | ||||
| 		sqlite.RegisterAsSQLITE3() | ||||
| 	} | ||||
|  | ||||
| 	type RequestData struct { | ||||
| 		ID        string                 `db:"id"` | ||||
| 		Timestamp *rfctime.UnixMilliTime `db:"timestamp"` | ||||
| 	} | ||||
|  | ||||
| 	ctx := context.Background() | ||||
|  | ||||
| 	dbdir := t.TempDir() | ||||
| 	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3") | ||||
|  | ||||
| 	tst.AssertNoErr(t, os.MkdirAll(dbdir, os.ModePerm)) | ||||
|  | ||||
| 	url := fmt.Sprintf("file:%s?_pragma=journal_mode(%s)&_pragma=timeout(%d)&_pragma=foreign_keys(%s)&_pragma=busy_timeout(%d)", dbfile1, "DELETE", 1000, "true", 1000) | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue}) | ||||
|  | ||||
| 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	t0 := rfctime.NewUnixMilli(time.Date(2012, 03, 01, 16, 0, 0, 0, time.UTC)) | ||||
|  | ||||
| 	_, err = InsertSingle(ctx, db, "requests", RequestData{ | ||||
| 		ID:        "001", | ||||
| 		Timestamp: &t0, | ||||
| 	}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	_, err = InsertSingle(ctx, db, "requests", RequestData{ | ||||
| 		ID:        "002", | ||||
| 		Timestamp: nil, | ||||
| 	}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	{ | ||||
| 		r1, err := QuerySingle[RequestData](ctx, db, "SELECT * FROM requests WHERE id = '001'", PP{}, SModeExtended, Safe) | ||||
| 		tst.AssertNoErr(t, err) | ||||
| 		fmt.Printf("%+v\n", r1) | ||||
| 		tst.AssertEqual(t, "001", r1.ID) | ||||
| 		tst.AssertEqual(t, t0.UnixNano(), r1.Timestamp.UnixNano()) | ||||
| 	} | ||||
|  | ||||
| 	{ | ||||
| 		r2, err := QuerySingle[RequestData](ctx, db, "SELECT * FROM requests WHERE id = '002'", PP{}, SModeExtended, Safe) | ||||
| 		tst.AssertNoErr(t, err) | ||||
| 		fmt.Printf("%+v\n", r2) | ||||
| 		tst.AssertEqual(t, "002", r2.ID) | ||||
| 		tst.AssertEqual(t, nil, r2.Timestamp) | ||||
| 	} | ||||
| } | ||||
|   | ||||
| @@ -7,6 +7,7 @@ import ( | ||||
| 	"github.com/jmoiron/sqlx/reflectx" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"reflect" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| // forked from sqlx, but added ability to unmarshal optional-nested structs | ||||
| @@ -18,7 +19,7 @@ type StructScanner struct { | ||||
|  | ||||
| 	fields    [][]int | ||||
| 	values    []any | ||||
| 	converter []DBTypeConverter | ||||
| 	converter []ssConverter | ||||
| 	columns   []string | ||||
| } | ||||
|  | ||||
| @@ -30,6 +31,11 @@ func NewStructScanner(rows *sqlx.Rows, unsafe bool) *StructScanner { | ||||
| 	} | ||||
| } | ||||
|  | ||||
| type ssConverter struct { | ||||
| 	Converter DBTypeConverter | ||||
| 	RefCount  int | ||||
| } | ||||
|  | ||||
| func (r *StructScanner) Start(dest any) error { | ||||
| 	v := reflect.ValueOf(dest) | ||||
|  | ||||
| @@ -49,7 +55,7 @@ func (r *StructScanner) Start(dest any) error { | ||||
| 		return fmt.Errorf("missing destination name %s in %T", columns[f], dest) | ||||
| 	} | ||||
| 	r.values = make([]interface{}, len(columns)) | ||||
| 	r.converter = make([]DBTypeConverter, len(columns)) | ||||
| 	r.converter = make([]ssConverter, len(columns)) | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
| @@ -143,13 +149,19 @@ func (r *StructScanner) StructScanExt(q Queryable, dest any) error { | ||||
|  | ||||
| 			f.Set(reflect.Zero(f.Type())) // set to nil | ||||
| 		} else { | ||||
| 			if r.converter[i] != nil { | ||||
| 				val3 := val2.Elem().Interface() | ||||
| 				conv3, err := r.converter[i].DBToModel(val3) | ||||
| 			if r.converter[i].Converter != nil { | ||||
| 				val3 := val2.Elem() | ||||
| 				conv3, err := r.converter[i].Converter.DBToModel(val3.Interface()) | ||||
| 				if err != nil { | ||||
| 					return err | ||||
| 				} | ||||
| 				f.Set(reflect.ValueOf(conv3)) | ||||
| 				conv3RVal := reflect.ValueOf(conv3) | ||||
| 				for j := 0; j < r.converter[i].RefCount; j++ { | ||||
| 					newConv3Val := reflect.New(conv3RVal.Type()) | ||||
| 					newConv3Val.Elem().Set(conv3RVal) | ||||
| 					conv3RVal = newConv3Val | ||||
| 				} | ||||
| 				f.Set(conv3RVal) | ||||
| 			} else { | ||||
| 				f.Set(val2.Elem()) | ||||
| 			} | ||||
| @@ -184,7 +196,7 @@ func (r *StructScanner) StructScanBase(dest any) error { | ||||
| } | ||||
|  | ||||
| // fieldsByTraversal forked from github.com/jmoiron/sqlx@v1.3.5/sqlx.go | ||||
| func fieldsByTraversalExtended(q Queryable, v reflect.Value, traversals [][]int, values []interface{}, converter []DBTypeConverter) error { | ||||
| func fieldsByTraversalExtended(q Queryable, v reflect.Value, traversals [][]int, values []interface{}, converter []ssConverter) error { | ||||
| 	v = reflect.Indirect(v) | ||||
| 	if v.Kind() != reflect.Struct { | ||||
| 		return errors.New("argument not a struct") | ||||
| @@ -205,14 +217,26 @@ func fieldsByTraversalExtended(q Queryable, v reflect.Value, traversals [][]int, | ||||
| 				_v := langext.Ptr[any](nil) | ||||
| 				values[i] = _v | ||||
| 				foundConverter = true | ||||
| 				converter[i] = conv | ||||
| 				converter[i] = ssConverter{Converter: conv, RefCount: 0} | ||||
| 				break | ||||
| 			} | ||||
| 		} | ||||
| 		if !foundConverter { | ||||
| 			// also allow non-pointer converter for pointer-types | ||||
| 			for _, conv := range q.ListConverter() { | ||||
| 				if conv.ModelTypeString() == strings.TrimLeft(typeStr, "*") { | ||||
| 					_v := langext.Ptr[any](nil) | ||||
| 					values[i] = _v | ||||
| 					foundConverter = true | ||||
| 					converter[i] = ssConverter{Converter: conv, RefCount: len(typeStr) - len(strings.TrimLeft(typeStr, "*"))} // kind hacky way to get the amount of ptr before <f>, but it works... | ||||
| 					break | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		if !foundConverter { | ||||
| 			values[i] = reflect.New(reflect.PointerTo(f.Type())).Interface() | ||||
| 			converter[i] = nil | ||||
| 			converter[i] = ssConverter{Converter: nil, RefCount: -1} | ||||
| 		} | ||||
| 	} | ||||
| 	return nil | ||||
|   | ||||
| @@ -4,6 +4,7 @@ import ( | ||||
| 	"context" | ||||
| 	"database/sql" | ||||
| 	"github.com/jmoiron/sqlx" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| ) | ||||
|  | ||||
| @@ -48,7 +49,7 @@ func (tx *transaction) Rollback() error { | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		err := v.PreTxRollback(tx.id) | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 			return exerr.Wrap(err, "failed to call SQL pre-rollback listener").Int("tx.id", int(tx.id)).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -69,7 +70,7 @@ func (tx *transaction) Commit() error { | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		err := v.PreTxCommit(tx.id) | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 			return exerr.Wrap(err, "failed to call SQL pre-commit listener").Int("tx.id", int(tx.id)).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -91,7 +92,7 @@ func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Re | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		err := v.PreExec(ctx, langext.Ptr(tx.id), &sqlstr, &prep) | ||||
| 		if err != nil { | ||||
| 			return nil, err | ||||
| 			return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -106,7 +107,7 @@ func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Re | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 		return nil, exerr.Wrap(err, "Failed to [exec] sql statement").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 	} | ||||
| 	return res, nil | ||||
| } | ||||
| @@ -116,7 +117,7 @@ func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		err := v.PreQuery(ctx, langext.Ptr(tx.id), &sqlstr, &prep) | ||||
| 		if err != nil { | ||||
| 			return nil, err | ||||
| 			return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -131,7 +132,7 @@ func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 		return nil, exerr.Wrap(err, "Failed to [query] sql statement").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 	} | ||||
| 	return rows, nil | ||||
| } | ||||
|   | ||||
| @@ -146,3 +146,37 @@ func UnixFloatSeconds(v float64) time.Time { | ||||
| func FloorTime(t time.Time) time.Time { | ||||
| 	return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location()) | ||||
| } | ||||
|  | ||||
| func SubtractYears(t time.Time, yearCount float64, tz *time.Location) time.Time { | ||||
| 	t = t.In(tz) | ||||
|  | ||||
| 	if yearCount < 0 { | ||||
| 		return AddYears(t, -yearCount, tz) | ||||
| 	} | ||||
|  | ||||
| 	intCount, floatCount := math.Modf(yearCount) | ||||
|  | ||||
| 	t.AddDate(-int(intCount), 0, 0) | ||||
|  | ||||
| 	t0 := TimeToYearStart(t, tz) | ||||
| 	t1 := TimeToYearEnd(t, tz) | ||||
|  | ||||
| 	return t.Add(time.Duration(float64(t1.Sub(t0)) * floatCount * -1)) | ||||
| } | ||||
|  | ||||
| func AddYears(t time.Time, yearCount float64, tz *time.Location) time.Time { | ||||
| 	t = t.In(tz) | ||||
|  | ||||
| 	if yearCount < 0 { | ||||
| 		return SubtractYears(t, -yearCount, tz) | ||||
| 	} | ||||
|  | ||||
| 	intCount, floatCount := math.Modf(yearCount) | ||||
|  | ||||
| 	t.AddDate(int(intCount), 0, 0) | ||||
|  | ||||
| 	t0 := TimeToYearStart(t, tz) | ||||
| 	t1 := TimeToYearEnd(t, tz) | ||||
|  | ||||
| 	return t.Add(time.Duration(float64(t1.Sub(t0)) * floatCount)) | ||||
| } | ||||
|   | ||||
| @@ -2,6 +2,7 @@ package tst | ||||
|  | ||||
| import ( | ||||
| 	"encoding/hex" | ||||
| 	"fmt" | ||||
| 	"reflect" | ||||
| 	"runtime/debug" | ||||
| 	"testing" | ||||
| @@ -125,3 +126,17 @@ func AssertNoErr(t *testing.T, anerr error) { | ||||
| 		t.Error("Function returned an error: " + anerr.Error() + "\n" + string(debug.Stack())) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func AssertStrRepEqual(t *testing.T, actual any, expected any) { | ||||
| 	t.Helper() | ||||
| 	if fmt.Sprintf("%v", actual) != fmt.Sprintf("%v", expected) { | ||||
| 		t.Errorf("values differ: Actual: '%v', Expected: '%v'", actual, expected) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func AssertStrRepNotEqual(t *testing.T, actual any, expected any) { | ||||
| 	t.Helper() | ||||
| 	if fmt.Sprintf("%v", actual) == fmt.Sprintf("%v", expected) { | ||||
| 		t.Errorf("values do not differ: Actual: '%v', Expected: '%v'", actual, expected) | ||||
| 	} | ||||
| } | ||||
|   | ||||
| @@ -38,6 +38,13 @@ type fullTypeRef struct { | ||||
| 	Index          []int | ||||
| } | ||||
|  | ||||
| type IColl interface { | ||||
| 	Collection() *mongo.Collection | ||||
| 	Name() string | ||||
| 	Indexes() mongo.IndexView | ||||
| 	Drop(ctx context.Context) error | ||||
| } | ||||
|  | ||||
| type Coll[TData any] struct { | ||||
| 	coll                *mongo.Collection                                        // internal mongo collection, access via Collection() | ||||
| 	dataTypeMap         map[string]fullTypeRef                                   // list of TData fields (only if TData is not an interface) | ||||
| @@ -45,7 +52,8 @@ type Coll[TData any] struct { | ||||
| 	customDecoder       *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface) | ||||
| 	isInterfaceDataType bool                                                     // true if TData is an interface (not a struct) | ||||
| 	unmarshalHooks      []func(d TData) TData                                    // called for every object after unmarshalling | ||||
| 	extraModPipeline    mongo.Pipeline                                           // appended to pipelines after filter/limit/skip/sort, used for $lookup, $set, $unset, $project, etc | ||||
| 	marshalHooks        []func(d TData) TData                                    // called for every object before marshalling | ||||
| 	extraModPipeline    []func(ctx context.Context) mongo.Pipeline               // appended to pipelines after filter/limit/skip/sort, used for $lookup, $set, $unset, $project, etc | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) Collection() *mongo.Collection { | ||||
| @@ -76,14 +84,34 @@ func (c *Coll[TData]) WithDecodeFunc(cdf func(ctx context.Context, dec Decodable | ||||
| 	return c | ||||
| } | ||||
|  | ||||
| // WithUnmarshalHook | ||||
| // function that is called for every object after reading from DB | ||||
| func (c *Coll[TData]) WithUnmarshalHook(fn func(d TData) TData) *Coll[TData] { | ||||
| 	c.unmarshalHooks = append(c.unmarshalHooks, fn) | ||||
|  | ||||
| 	return c | ||||
| } | ||||
|  | ||||
| // WithMarshalHook | ||||
| // function that is called for every object before writing to DB | ||||
| func (c *Coll[TData]) WithMarshalHook(fn func(d TData) TData) *Coll[TData] { | ||||
| 	c.marshalHooks = append(c.marshalHooks, fn) | ||||
|  | ||||
| 	return c | ||||
| } | ||||
|  | ||||
| // WithModifyingPipeline | ||||
| // pipeline that is appended to all read operations (after filtering) | ||||
| func (c *Coll[TData]) WithModifyingPipeline(p mongo.Pipeline) *Coll[TData] { | ||||
| 	c.extraModPipeline = append(c.extraModPipeline, p...) | ||||
| 	c.extraModPipeline = append(c.extraModPipeline, func(ctx context.Context) mongo.Pipeline { return p }) | ||||
|  | ||||
| 	return c | ||||
| } | ||||
|  | ||||
| // WithModifyingPipelineFunc | ||||
| // pipeline that is appended to all read operations (after filtering) | ||||
| func (c *Coll[TData]) WithModifyingPipelineFunc(fn func(ctx context.Context) mongo.Pipeline) *Coll[TData] { | ||||
| 	c.extraModPipeline = append(c.extraModPipeline, fn) | ||||
|  | ||||
| 	return c | ||||
| } | ||||
| @@ -112,3 +140,17 @@ func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirecti | ||||
| 		Extra:          ct.Extra{}, | ||||
| 	}, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) needsDoubleSort(ctx context.Context) bool { | ||||
| 	for _, ppl := range c.extraModPipeline { | ||||
| 		for _, stage := range ppl(ctx) { | ||||
| 			for _, bsone := range stage { | ||||
| 				if bsone.Key == "$group" { | ||||
| 					// a group stage in extraModPipeline results in unsorted data, which means the caller must sort again after these pipeline stages... | ||||
| 					return true | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 	return false | ||||
| } | ||||
|   | ||||
| @@ -10,7 +10,9 @@ import ( | ||||
|  | ||||
| func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) { | ||||
|  | ||||
| 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||
| 	for _, ppl := range c.extraModPipeline { | ||||
| 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||
| 	} | ||||
|  | ||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||
| 	if err != nil { | ||||
| @@ -27,7 +29,9 @@ func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, op | ||||
|  | ||||
| func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) { | ||||
|  | ||||
| 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||
| 	for _, ppl := range c.extraModPipeline { | ||||
| 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||
| 	} | ||||
|  | ||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||
| 	if err != nil { | ||||
| @@ -47,7 +51,9 @@ func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeli | ||||
|  | ||||
| func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) { | ||||
|  | ||||
| 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||
| 	for _, ppl := range c.extraModPipeline { | ||||
| 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||
| 	} | ||||
|  | ||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||
| 	if err != nil { | ||||
|   | ||||
| @@ -32,7 +32,17 @@ func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options. | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||
| 	for _, ppl := range c.extraModPipeline { | ||||
| 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||
| 	} | ||||
|  | ||||
| 	if c.needsDoubleSort(ctx) { | ||||
| 		for _, opt := range opts { | ||||
| 			if opt != nil && opt.Sort != nil { | ||||
| 				pipeline = append(pipeline, bson.D{{Key: "$sort", Value: opt.Sort}}) | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	for _, opt := range opts { | ||||
| 		if opt != nil && opt.Projection != nil { | ||||
|   | ||||
| @@ -71,7 +71,9 @@ func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowN | ||||
| 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}}) | ||||
| 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: 1}}) | ||||
|  | ||||
| 		pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||
| 		for _, ppl := range c.extraModPipeline { | ||||
| 			pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||
| 		} | ||||
|  | ||||
| 		cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||
| 		if err != nil { | ||||
|   | ||||
| @@ -9,6 +9,10 @@ import ( | ||||
| ) | ||||
|  | ||||
| func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, error) { | ||||
| 	for _, hook := range c.marshalHooks { | ||||
| 		valueIn = hook(valueIn) | ||||
| 	} | ||||
|  | ||||
| 	insRes, err := c.coll.InsertOne(ctx, valueIn) | ||||
| 	if err != nil { | ||||
| 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||
| @@ -36,6 +40,12 @@ func (c *Coll[TData]) InsertOneUnchecked(ctx context.Context, valueIn any) (TDat | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) { | ||||
| 	for _, hook := range c.marshalHooks { | ||||
| 		for i := 0; i < len(valueIn); i++ { | ||||
| 			valueIn[i] = hook(valueIn[i]) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	insRes, err := c.coll.InsertMany(ctx, langext.ArrayToInterface(valueIn)) | ||||
| 	if err != nil { | ||||
| 		return nil, exerr.Wrap(err, "mongo-query[insert-many] failed").Int("len(valueIn)", len(valueIn)).Str("collection", c.Name()).Build() | ||||
|   | ||||
| @@ -6,6 +6,7 @@ import ( | ||||
| 	"go.mongodb.org/mongo-driver/mongo" | ||||
| 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| ) | ||||
|  | ||||
| func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) { | ||||
| @@ -13,6 +14,10 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | ||||
| 		return make([]TData, 0), ct.End(), nil | ||||
| 	} | ||||
|  | ||||
| 	if pageSize != nil && *pageSize == 0 { | ||||
| 		return make([]TData, 0), inTok, nil // fast track, we return an empty list and do not advance the cursor token | ||||
| 	} | ||||
|  | ||||
| 	pipeline := mongo.Pipeline{} | ||||
| 	pf1 := "_id" | ||||
| 	pd1 := ct.SortASC | ||||
| @@ -34,7 +39,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | ||||
| 		sortDirSecondary = nil | ||||
| 	} | ||||
|  | ||||
| 	paginationPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | ||||
| 	paginationPipeline, doubleSortPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | ||||
| 	if err != nil { | ||||
| 		return nil, ct.CursorToken{}, exerr. | ||||
| 			Wrap(err, "failed to create pagination"). | ||||
| @@ -50,7 +55,14 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | ||||
| 	} | ||||
|  | ||||
| 	pipeline = append(pipeline, paginationPipeline...) | ||||
| 	pipeline = append(pipeline, c.extraModPipeline...) | ||||
|  | ||||
| 	for _, ppl := range c.extraModPipeline { | ||||
| 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||
| 	} | ||||
|  | ||||
| 	if c.needsDoubleSort(ctx) { | ||||
| 		pipeline = langext.ArrConcat(pipeline, doubleSortPipeline) | ||||
| 	} | ||||
|  | ||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||
| 	if err != nil { | ||||
| @@ -132,14 +144,50 @@ func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageS | ||||
| 	return data, token, count, nil | ||||
| } | ||||
|  | ||||
| func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) { | ||||
| func (c *Coll[TData]) ListAllIDs(ctx context.Context, filter ct.RawFilter) ([]string, error) { | ||||
| 	type idObject struct { | ||||
| 		ID string `bson:"_id"` | ||||
| 	} | ||||
|  | ||||
| 	pipelineFilter := mongo.Pipeline{} | ||||
|  | ||||
| 	if filter != nil { | ||||
| 		pipelineFilter = filter.FilterQuery() | ||||
| 	} | ||||
|  | ||||
| 	extrModPipelineResolved := mongo.Pipeline{} | ||||
| 	for _, ppl := range c.extraModPipeline { | ||||
| 		extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx)) | ||||
| 	} | ||||
|  | ||||
| 	pipelineProjectIDs := mongo.Pipeline{} | ||||
| 	pipelineProjectIDs = append(pipelineProjectIDs, bson.D{{Key: "$project", Value: bson.M{"_id": 1}}}) | ||||
|  | ||||
| 	pipelineList := langext.ArrConcat(pipelineFilter, extrModPipelineResolved, pipelineProjectIDs) | ||||
|  | ||||
| 	cursorList, err := c.coll.Aggregate(ctx, pipelineList) | ||||
| 	if err != nil { | ||||
| 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build() | ||||
| 	} | ||||
|  | ||||
| 	var res []idObject | ||||
|  | ||||
| 	err = cursorList.All(ctx, &res) | ||||
| 	if err != nil { | ||||
| 		return nil, exerr.Wrap(err, "failed to decode entities").Any("pipeline", pipelineList).Str("collection", c.Name()).Build() | ||||
| 	} | ||||
|  | ||||
| 	return langext.ArrMap(res, func(v idObject) string { return v.ID }), nil | ||||
| } | ||||
|  | ||||
| func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) { | ||||
|  | ||||
| 	cond := bson.A{} | ||||
| 	sort := bson.D{} | ||||
|  | ||||
| 	valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary) | ||||
| 	if err != nil { | ||||
| 		return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build() | ||||
| 		return nil, nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build() | ||||
| 	} | ||||
|  | ||||
| 	if sortPrimary == ct.SortASC { | ||||
| @@ -156,7 +204,7 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken | ||||
|  | ||||
| 		valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary) | ||||
| 		if err != nil { | ||||
| 			return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build() | ||||
| 			return nil, nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build() | ||||
| 		} | ||||
|  | ||||
| 		if *sortSecondary == ct.SortASC { | ||||
| @@ -167,7 +215,7 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken | ||||
| 				bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}}, | ||||
| 			}}) | ||||
|  | ||||
| 			sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) | ||||
| 			sort = append(sort, bson.E{Key: *fieldSecondary, Value: +1}) | ||||
|  | ||||
| 		} else if *sortSecondary == ct.SortDESC { | ||||
|  | ||||
| @@ -177,7 +225,7 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken | ||||
| 				bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}}, | ||||
| 			}}) | ||||
|  | ||||
| 			sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) | ||||
| 			sort = append(sort, bson.E{Key: *fieldSecondary, Value: -1}) | ||||
|  | ||||
| 		} | ||||
| 	} | ||||
| @@ -199,15 +247,17 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken | ||||
|  | ||||
| 	} else { | ||||
|  | ||||
| 		return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build() | ||||
| 		return nil, nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build() | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}}) | ||||
|  | ||||
| 	pipelineSort := mongo.Pipeline{bson.D{{Key: "$sort", Value: sort}}} | ||||
|  | ||||
| 	if pageSize != nil { | ||||
| 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}}) | ||||
| 	} | ||||
|  | ||||
| 	return pipeline, nil | ||||
| 	return pipeline, pipelineSort, nil | ||||
| } | ||||
|   | ||||
| @@ -9,7 +9,7 @@ import ( | ||||
| 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | ||||
| ) | ||||
|  | ||||
| func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.Filter, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||
| func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.MongoFilter, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||
| 	type totalCountResult struct { | ||||
| 		Count int `bson:"count"` | ||||
| 	} | ||||
| @@ -42,7 +42,12 @@ func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.Filter, page int, | ||||
| 	pipelineCount := mongo.Pipeline{} | ||||
| 	pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}}) | ||||
|  | ||||
| 	pipelineList := langext.ArrConcat(pipelineFilter, pipelineSort, pipelinePaginate, c.extraModPipeline, pipelineSort) | ||||
| 	extrModPipelineResolved := mongo.Pipeline{} | ||||
| 	for _, ppl := range c.extraModPipeline { | ||||
| 		extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx)) | ||||
| 	} | ||||
|  | ||||
| 	pipelineList := langext.ArrConcat(pipelineFilter, pipelineSort, pipelinePaginate, extrModPipelineResolved, pipelineSort) | ||||
| 	pipelineTotalCount := langext.ArrConcat(pipelineFilter, pipelineCount) | ||||
|  | ||||
| 	cursorList, err := c.coll.Aggregate(ctx, pipelineList) | ||||
|   | ||||
| @@ -61,6 +61,10 @@ func (c *Coll[TData]) UpdateMany(ctx context.Context, filterQuery bson.M, update | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) ReplaceOne(ctx context.Context, filterQuery bson.M, value TData) error { | ||||
| 	for _, hook := range c.marshalHooks { | ||||
| 		value = hook(value) | ||||
| 	} | ||||
|  | ||||
| 	_, err := c.coll.UpdateOne(ctx, filterQuery, bson.M{"$set": value}) | ||||
| 	if err != nil { | ||||
| 		return exerr.Wrap(err, "mongo-query[replace-one] failed"). | ||||
| @@ -73,6 +77,10 @@ func (c *Coll[TData]) ReplaceOne(ctx context.Context, filterQuery bson.M, value | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) FindOneAndReplace(ctx context.Context, filterQuery bson.M, value TData) (TData, error) { | ||||
| 	for _, hook := range c.marshalHooks { | ||||
| 		value = hook(value) | ||||
| 	} | ||||
|  | ||||
| 	mongoRes := c.coll.FindOneAndReplace(ctx, filterQuery, value, options.FindOneAndReplace().SetReturnDocument(options.After)) | ||||
| 	if err := mongoRes.Err(); err != nil { | ||||
| 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one-and-update] failed"). | ||||
|   | ||||
		Reference in New Issue
	
	Block a user