Compare commits
	
		
			65 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| d2bb362135 | |||
| 9dd81f6bd5 | |||
| d2c04afcd5 | |||
| 62980e1489 | |||
| 59963adf74 | |||
| 194ea4ace5 | |||
| 73b80a66bc | |||
| d8b2d01274 | |||
| bfa8457e95 | |||
| 70106733d9 | |||
| ce7837b9ef | |||
| d0d72167eb | |||
| a55ee1a6ce | |||
| dfc319573c | |||
| 246e555f3f | |||
| c28bc086b2 | |||
| d44e971325 | |||
| fe4cdc48af | |||
| 631006a4e1 | |||
| 567ead8697 | |||
| e4886b4a7d | |||
| dcb5d3d7cd | |||
| 15a639f85a | |||
| 303bd04649 | |||
| 7bda674939 | |||
| 126d4fbd0b | |||
| fed8bccaab | |||
| 47b6a6b508 | |||
| 764ce79a71 | |||
| b876c64ba2 | |||
| 8d52b41f57 | |||
| f47e2a33fe | |||
| 9321938dad | |||
| 3828d601a2 | |||
| 2e713c808d | |||
| 6602f86b43 | |||
| 24d9f0fdc7 | |||
| 8446b2da22 | |||
| 758e5a67b5 | |||
| 678ddd7124 | |||
| 36b71dfaf3 | |||
| 9491b72b8d | |||
| 6c4af4006b | |||
| 8bf3a337cf | |||
| 16146494dc | |||
| b0e443ad99 | |||
| 9955eacf96 | |||
| f0347a9435 | |||
| 7c869c65f3 | |||
| 14f39a9162 | |||
| dcd106c1cd | |||
| b704e2a362 | |||
| 6b4bd5a6f8 | |||
| 6df4f5f2a1 | |||
| 780905ba35 | |||
| c679797765 | |||
| 401aad9fa4 | |||
| 645113d553 | |||
| 4a33986b6a | |||
| c1c8c64c76 | |||
| 0927fdc4d7 | |||
| 102a280dda | |||
| f13384d794 | |||
| 409d6e108d | |||
| ed53f297bd | 
							
								
								
									
										4
									
								
								TODO.md
									
									
									
									
									
								
							
							
						
						
									
										4
									
								
								TODO.md
									
									
									
									
									
								
							| @@ -2,6 +2,8 @@ | |||||||
|  |  | ||||||
|  - cronext |  - cronext | ||||||
|  |  | ||||||
|  - rfctime.DateOnly |  | ||||||
|  - rfctime.HMSTimeOnly |  - rfctime.HMSTimeOnly | ||||||
|  - rfctime.NanoTimeOnly |  - rfctime.NanoTimeOnly | ||||||
|  |  | ||||||
|  |  - remove sqlx dependency from sq  (unmaintained, and mostly superseeded by our own stuff?) | ||||||
|  |  - Move DBLogger and DBPreprocessor to sq | ||||||
| @@ -26,6 +26,10 @@ type CSIDDef struct { | |||||||
| 	Prefix       string | 	Prefix       string | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type CSIDGenOptions struct { | ||||||
|  | 	DebugOutput *bool | ||||||
|  | } | ||||||
|  |  | ||||||
| var rexCSIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | var rexCSIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||||
|  |  | ||||||
| var rexCSIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@csid:type)\s+\[(?P<prefix>[A-Z0-9]{3})].*$`)) | var rexCSIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@csid:type)\s+\[(?P<prefix>[A-Z0-9]{3})].*$`)) | ||||||
| @@ -35,7 +39,9 @@ var rexCSIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumCharsetIDGen | |||||||
| //go:embed csid-generate.template | //go:embed csid-generate.template | ||||||
| var templateCSIDGenerateText string | var templateCSIDGenerateText string | ||||||
|  |  | ||||||
| func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | func GenerateCharsetIDSpecs(sourceDir string, destFile string, opt CSIDGenOptions) error { | ||||||
|  |  | ||||||
|  | 	debugOutput := langext.Coalesce(opt.DebugOutput, false) | ||||||
|  |  | ||||||
| 	files, err := os.ReadDir(sourceDir) | 	files, err := os.ReadDir(sourceDir) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -81,13 +87,18 @@ func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | |||||||
| 	pkgname := "" | 	pkgname := "" | ||||||
|  |  | ||||||
| 	for _, f := range files { | 	for _, f := range files { | ||||||
|  | 		if debugOutput { | ||||||
| 			fmt.Printf("========= %s =========\n\n", f.Name()) | 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||||
| 		fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name())) | 		} | ||||||
|  |  | ||||||
|  | 		fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return err | 			return err | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|  | 		if debugOutput { | ||||||
| 			fmt.Printf("\n") | 			fmt.Printf("\n") | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		allIDs = append(allIDs, fileIDs...) | 		allIDs = append(allIDs, fileIDs...) | ||||||
|  |  | ||||||
| @@ -113,7 +124,7 @@ func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) { | func processCSIDFile(basedir string, fn string, debugOutput bool) ([]CSIDDef, string, error) { | ||||||
| 	file, err := os.Open(fn) | 	file, err := os.Open(fn) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, "", err | 		return nil, "", err | ||||||
| @@ -155,7 +166,11 @@ func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) { | |||||||
| 				Name:         match.GroupByName("name").Value(), | 				Name:         match.GroupByName("name").Value(), | ||||||
| 				Prefix:       match.GroupByName("prefix").Value(), | 				Prefix:       match.GroupByName("prefix").Value(), | ||||||
| 			} | 			} | ||||||
|  |  | ||||||
|  | 			if debugOutput { | ||||||
| 				fmt.Printf("Found ID definition { '%s' }\n", def.Name) | 				fmt.Printf("Found ID definition { '%s' }\n", def.Name) | ||||||
|  | 			} | ||||||
|  |  | ||||||
| 			ids = append(ids, def) | 			ids = append(ids, def) | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|   | |||||||
| @@ -34,10 +34,10 @@ func TestGenerateCSIDSpecs(t *testing.T) { | |||||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go") | 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go", CSIDGenOptions{DebugOutput: langext.PTrue}) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go") | 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go", CSIDGenOptions{DebugOutput: langext.PTrue}) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	fmt.Println() | 	fmt.Println() | ||||||
|   | |||||||
| @@ -37,6 +37,11 @@ type EnumDef struct { | |||||||
| 	Values       []EnumDefVal | 	Values       []EnumDefVal | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type EnumGenOptions struct { | ||||||
|  | 	DebugOutput *bool | ||||||
|  | 	GoFormat    *bool | ||||||
|  | } | ||||||
|  |  | ||||||
| var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||||
|  |  | ||||||
| var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | ||||||
| @@ -48,7 +53,7 @@ var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerato | |||||||
| //go:embed enum-generate.template | //go:embed enum-generate.template | ||||||
| var templateEnumGenerateText string | var templateEnumGenerateText string | ||||||
|  |  | ||||||
| func GenerateEnumSpecs(sourceDir string, destFile string) error { | func GenerateEnumSpecs(sourceDir string, destFile string, opt EnumGenOptions) error { | ||||||
|  |  | ||||||
| 	oldChecksum := "N/A" | 	oldChecksum := "N/A" | ||||||
| 	if _, err := os.Stat(destFile); !os.IsNotExist(err) { | 	if _, err := os.Stat(destFile); !os.IsNotExist(err) { | ||||||
| @@ -61,7 +66,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	gocode, _, changed, err := _generateEnumSpecs(sourceDir, destFile, oldChecksum, true) | 	gocode, _, changed, err := _generateEnumSpecs(sourceDir, destFile, oldChecksum, langext.Coalesce(opt.GoFormat, true), langext.Coalesce(opt.DebugOutput, false)) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
| @@ -78,7 +83,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, gofmt bool) (string, string, bool, error) { | func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, gofmt bool, debugOutput bool) (string, string, bool, error) { | ||||||
|  |  | ||||||
| 	files, err := os.ReadDir(sourceDir) | 	files, err := os.ReadDir(sourceDir) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -113,13 +118,18 @@ func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, g | |||||||
| 	pkgname := "" | 	pkgname := "" | ||||||
|  |  | ||||||
| 	for _, f := range files { | 	for _, f := range files { | ||||||
|  | 		if debugOutput { | ||||||
| 			fmt.Printf("========= %s =========\n\n", f.Name()) | 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||||
| 		fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name())) | 		} | ||||||
|  |  | ||||||
|  | 		fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return "", "", false, err | 			return "", "", false, err | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|  | 		if debugOutput { | ||||||
| 			fmt.Printf("\n") | 			fmt.Printf("\n") | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		allEnums = append(allEnums, fileEnums...) | 		allEnums = append(allEnums, fileEnums...) | ||||||
|  |  | ||||||
| @@ -146,7 +156,7 @@ func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, g | |||||||
| 	return string(fdata), newChecksum, true, nil | 	return string(fdata), newChecksum, true, nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | func processEnumFile(basedir string, fn string, debugOutput bool) ([]EnumDef, string, error) { | ||||||
| 	file, err := os.Open(fn) | 	file, err := os.Open(fn) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, "", err | 		return nil, "", err | ||||||
| @@ -190,8 +200,11 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | |||||||
| 				Values:       make([]EnumDefVal, 0), | 				Values:       make([]EnumDefVal, 0), | ||||||
| 			} | 			} | ||||||
| 			enums = append(enums, def) | 			enums = append(enums, def) | ||||||
|  |  | ||||||
|  | 			if debugOutput { | ||||||
| 				fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type) | 				fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type) | ||||||
| 			} | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		if match, ok := rexEnumValueDef.MatchFirst(line); ok { | 		if match, ok := rexEnumValueDef.MatchFirst(line); ok { | ||||||
| 			typename := match.GroupByName("type").Value() | 			typename := match.GroupByName("type").Value() | ||||||
| @@ -230,19 +243,24 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | |||||||
| 				if v.EnumTypeName == typename { | 				if v.EnumTypeName == typename { | ||||||
| 					enums[i].Values = append(enums[i].Values, def) | 					enums[i].Values = append(enums[i].Values, def) | ||||||
| 					found = true | 					found = true | ||||||
|  |  | ||||||
|  | 					if debugOutput { | ||||||
| 						if def.Description != nil { | 						if def.Description != nil { | ||||||
| 							fmt.Printf("Found enum value [%s] for '%s'  ('%s')\n", def.Value, def.VarName, *def.Description) | 							fmt.Printf("Found enum value [%s] for '%s'  ('%s')\n", def.Value, def.VarName, *def.Description) | ||||||
| 						} else { | 						} else { | ||||||
| 							fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName) | 							fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName) | ||||||
| 						} | 						} | ||||||
|  | 					} | ||||||
| 					break | 					break | ||||||
| 				} | 				} | ||||||
| 			} | 			} | ||||||
| 			if !found { | 			if !found { | ||||||
|  | 				if debugOutput { | ||||||
| 					fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName) | 					fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName) | ||||||
| 				} | 				} | ||||||
| 			} | 			} | ||||||
| 		} | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	return enums, pkgname, nil | 	return enums, pkgname, nil | ||||||
| } | } | ||||||
|   | |||||||
| @@ -7,6 +7,8 @@ import "gogs.mikescher.com/BlackForestBytes/goext/enums" | |||||||
|  |  | ||||||
| const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | ||||||
|  |  | ||||||
|  | {{ $pkgname  := .PkgName }} | ||||||
|  |  | ||||||
| {{range .Enums}} | {{range .Enums}} | ||||||
|  |  | ||||||
| {{ $hasStr   := ( . | hasStr   ) }} | {{ $hasStr   := ( . | hasStr   ) }} | ||||||
| @@ -97,6 +99,14 @@ func (e {{.EnumTypeName}}) VarName() string { | |||||||
| 	return "" | 	return "" | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) TypeName() string { | ||||||
|  | 	return "{{$typename}}" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) PackageName() string { | ||||||
|  | 	return "{{$pkgname }}" | ||||||
|  | } | ||||||
|  |  | ||||||
| func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue { | func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue { | ||||||
|     {{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}} |     {{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}} | ||||||
| } | } | ||||||
| @@ -135,3 +145,11 @@ func {{.EnumTypeName}}ValuesDescriptionMeta() []enums.EnumDescriptionMetaValue { | |||||||
| {{end}} | {{end}} | ||||||
|  |  | ||||||
| {{end}} | {{end}} | ||||||
|  |  | ||||||
|  | // ================================ ================= ================================ | ||||||
|  |  | ||||||
|  | func AllPackageEnums() []enums.Enum { | ||||||
|  |     return []enums.Enum{ {{range .Enums}} | ||||||
|  |         {{ if gt (len .Values) 0 }} {{  $v := index .Values 0 }} {{ $v.VarName}}, {{end}} // {{ .EnumTypeName }} {{end}} | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -37,10 +37,10 @@ func TestGenerateEnumSpecs(t *testing.T) { | |||||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	s1, cs1, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true) | 	s1, cs1, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true, true) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	s2, cs2, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true) | 	s2, cs2, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true, true) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	tst.AssertEqual(t, cs1, cs2) | 	tst.AssertEqual(t, cs1, cs2) | ||||||
| @@ -76,7 +76,7 @@ func TestGenerateEnumSpecsData(t *testing.T) { | |||||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	s1, _, _, err := _generateEnumSpecs(tmpDir, "", "", true) | 	s1, _, _, err := _generateEnumSpecs(tmpDir, "", "", true, true) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	fmt.Println() | 	fmt.Println() | ||||||
|   | |||||||
| @@ -25,6 +25,10 @@ type IDDef struct { | |||||||
| 	Name         string | 	Name         string | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type IDGenOptions struct { | ||||||
|  | 	DebugOutput *bool | ||||||
|  | } | ||||||
|  |  | ||||||
| var rexIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | var rexIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||||
|  |  | ||||||
| var rexIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@id:type).*$`)) | var rexIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@id:type).*$`)) | ||||||
| @@ -34,7 +38,9 @@ var rexIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumIDGenerator = | |||||||
| //go:embed id-generate.template | //go:embed id-generate.template | ||||||
| var templateIDGenerateText string | var templateIDGenerateText string | ||||||
|  |  | ||||||
| func GenerateIDSpecs(sourceDir string, destFile string) error { | func GenerateIDSpecs(sourceDir string, destFile string, opt IDGenOptions) error { | ||||||
|  |  | ||||||
|  | 	debugOutput := langext.Coalesce(opt.DebugOutput, false) | ||||||
|  |  | ||||||
| 	files, err := os.ReadDir(sourceDir) | 	files, err := os.ReadDir(sourceDir) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -80,13 +86,18 @@ func GenerateIDSpecs(sourceDir string, destFile string) error { | |||||||
| 	pkgname := "" | 	pkgname := "" | ||||||
|  |  | ||||||
| 	for _, f := range files { | 	for _, f := range files { | ||||||
|  | 		if debugOutput { | ||||||
| 			fmt.Printf("========= %s =========\n\n", f.Name()) | 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||||
| 		fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name())) | 		} | ||||||
|  |  | ||||||
|  | 		fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return err | 			return err | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|  | 		if debugOutput { | ||||||
| 			fmt.Printf("\n") | 			fmt.Printf("\n") | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		allIDs = append(allIDs, fileIDs...) | 		allIDs = append(allIDs, fileIDs...) | ||||||
|  |  | ||||||
| @@ -112,7 +123,7 @@ func GenerateIDSpecs(sourceDir string, destFile string) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func processIDFile(basedir string, fn string) ([]IDDef, string, error) { | func processIDFile(basedir string, fn string, debugOutput bool) ([]IDDef, string, error) { | ||||||
| 	file, err := os.Open(fn) | 	file, err := os.Open(fn) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, "", err | 		return nil, "", err | ||||||
| @@ -153,7 +164,11 @@ func processIDFile(basedir string, fn string) ([]IDDef, string, error) { | |||||||
| 				FileRelative: rfp, | 				FileRelative: rfp, | ||||||
| 				Name:         match.GroupByName("name").Value(), | 				Name:         match.GroupByName("name").Value(), | ||||||
| 			} | 			} | ||||||
|  |  | ||||||
|  | 			if debugOutput { | ||||||
| 				fmt.Printf("Found ID definition { '%s' }\n", def.Name) | 				fmt.Printf("Found ID definition { '%s' }\n", def.Name) | ||||||
|  | 			} | ||||||
|  |  | ||||||
| 			ids = append(ids, def) | 			ids = append(ids, def) | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|   | |||||||
| @@ -34,10 +34,10 @@ func TestGenerateIDSpecs(t *testing.T) { | |||||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go") | 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go", IDGenOptions{DebugOutput: langext.PTrue}) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go") | 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go", IDGenOptions{DebugOutput: langext.PTrue}) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	fmt.Println() | 	fmt.Println() | ||||||
|   | |||||||
							
								
								
									
										254
									
								
								dataext/casMutex.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										254
									
								
								dataext/casMutex.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,254 @@ | |||||||
|  | package dataext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"golang.org/x/sync/semaphore" | ||||||
|  | 	"runtime" | ||||||
|  | 	"sync" | ||||||
|  | 	"sync/atomic" | ||||||
|  | 	"time" | ||||||
|  | 	"unsafe" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | // from https://github.com/viney-shih/go-lock/blob/2f19fd8ce335e33e0ab9dccb1ff2ce820c3da332/cas.go | ||||||
|  |  | ||||||
|  | // CASMutex is the struct implementing RWMutex with CAS mechanism. | ||||||
|  | type CASMutex struct { | ||||||
|  | 	state     casState | ||||||
|  | 	turnstile *semaphore.Weighted | ||||||
|  |  | ||||||
|  | 	broadcastChan chan struct{} | ||||||
|  | 	broadcastMut  sync.RWMutex | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewCASMutex() *CASMutex { | ||||||
|  | 	return &CASMutex{ | ||||||
|  | 		state:         casStateNoLock, | ||||||
|  | 		turnstile:     semaphore.NewWeighted(1), | ||||||
|  | 		broadcastChan: make(chan struct{}), | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type casState int32 | ||||||
|  |  | ||||||
|  | const ( | ||||||
|  | 	casStateUndefined casState = iota - 2 // -2 | ||||||
|  | 	casStateWriteLock                     // -1 | ||||||
|  | 	casStateNoLock                        // 0 | ||||||
|  | 	casStateReadLock                      // >= 1 | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func (m *CASMutex) getState(n int32) casState { | ||||||
|  | 	switch st := casState(n); { | ||||||
|  | 	case st == casStateWriteLock: | ||||||
|  | 		fallthrough | ||||||
|  | 	case st == casStateNoLock: | ||||||
|  | 		return st | ||||||
|  | 	case st >= casStateReadLock: | ||||||
|  | 		return casStateReadLock | ||||||
|  | 	default: | ||||||
|  | 		// actually, it should not happened. | ||||||
|  | 		return casStateUndefined | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (m *CASMutex) listen() <-chan struct{} { | ||||||
|  | 	m.broadcastMut.RLock() | ||||||
|  | 	defer m.broadcastMut.RUnlock() | ||||||
|  |  | ||||||
|  | 	return m.broadcastChan | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (m *CASMutex) broadcast() { | ||||||
|  | 	newCh := make(chan struct{}) | ||||||
|  |  | ||||||
|  | 	m.broadcastMut.Lock() | ||||||
|  | 	ch := m.broadcastChan | ||||||
|  | 	m.broadcastChan = newCh | ||||||
|  | 	m.broadcastMut.Unlock() | ||||||
|  |  | ||||||
|  | 	close(ch) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (m *CASMutex) tryLock(ctx context.Context) bool { | ||||||
|  | 	for { | ||||||
|  | 		broker := m.listen() | ||||||
|  | 		if atomic.CompareAndSwapInt32( | ||||||
|  | 			(*int32)(unsafe.Pointer(&m.state)), | ||||||
|  | 			int32(casStateNoLock), | ||||||
|  | 			int32(casStateWriteLock), | ||||||
|  | 		) { | ||||||
|  | 			return true | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if ctx == nil { | ||||||
|  | 			return false | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		select { | ||||||
|  | 		case <-ctx.Done(): | ||||||
|  | 			// timeout or cancellation | ||||||
|  | 			return false | ||||||
|  | 		case <-broker: | ||||||
|  | 			// waiting for signal triggered by m.broadcast() and trying again. | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // TryLockWithContext attempts to acquire the lock, blocking until resources | ||||||
|  | // are available or ctx is done (timeout or cancellation). | ||||||
|  | func (m *CASMutex) TryLockWithContext(ctx context.Context) bool { | ||||||
|  | 	if err := m.turnstile.Acquire(ctx, 1); err != nil { | ||||||
|  | 		// Acquire failed due to timeout or cancellation | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	defer m.turnstile.Release(1) | ||||||
|  |  | ||||||
|  | 	return m.tryLock(ctx) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // Lock acquires the lock. | ||||||
|  | // If it is currently held by others, Lock will wait until it has a chance to acquire it. | ||||||
|  | func (m *CASMutex) Lock() { | ||||||
|  | 	ctx := context.Background() | ||||||
|  |  | ||||||
|  | 	m.TryLockWithContext(ctx) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // TryLock attempts to acquire the lock without blocking. | ||||||
|  | // Return false if someone is holding it now. | ||||||
|  | func (m *CASMutex) TryLock() bool { | ||||||
|  | 	if !m.turnstile.TryAcquire(1) { | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	defer m.turnstile.Release(1) | ||||||
|  |  | ||||||
|  | 	return m.tryLock(nil) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // TryLockWithTimeout attempts to acquire the lock within a period of time. | ||||||
|  | // Return false if spending time is more than duration and no chance to acquire it. | ||||||
|  | func (m *CASMutex) TryLockWithTimeout(duration time.Duration) bool { | ||||||
|  | 	ctx, cancel := context.WithTimeout(context.Background(), duration) | ||||||
|  | 	defer cancel() | ||||||
|  |  | ||||||
|  | 	return m.TryLockWithContext(ctx) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // Unlock releases the lock. | ||||||
|  | func (m *CASMutex) Unlock() { | ||||||
|  | 	if ok := atomic.CompareAndSwapInt32( | ||||||
|  | 		(*int32)(unsafe.Pointer(&m.state)), | ||||||
|  | 		int32(casStateWriteLock), | ||||||
|  | 		int32(casStateNoLock), | ||||||
|  | 	); !ok { | ||||||
|  | 		panic("Unlock failed") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	m.broadcast() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (m *CASMutex) rTryLock(ctx context.Context) bool { | ||||||
|  | 	for { | ||||||
|  | 		broker := m.listen() | ||||||
|  | 		n := atomic.LoadInt32((*int32)(unsafe.Pointer(&m.state))) | ||||||
|  | 		st := m.getState(n) | ||||||
|  | 		switch st { | ||||||
|  | 		case casStateNoLock, casStateReadLock: | ||||||
|  | 			if atomic.CompareAndSwapInt32((*int32)(unsafe.Pointer(&m.state)), n, n+1) { | ||||||
|  | 				return true | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if ctx == nil { | ||||||
|  | 			return false | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		select { | ||||||
|  | 		case <-ctx.Done(): | ||||||
|  | 			// timeout or cancellation | ||||||
|  | 			return false | ||||||
|  | 		default: | ||||||
|  | 			switch st { | ||||||
|  | 			// read-lock failed due to concurrence issue, try again immediately | ||||||
|  | 			case casStateNoLock, casStateReadLock: | ||||||
|  | 				runtime.Gosched() // allow other goroutines to do stuff. | ||||||
|  | 				continue | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		select { | ||||||
|  | 		case <-ctx.Done(): | ||||||
|  | 			// timeout or cancellation | ||||||
|  | 			return false | ||||||
|  | 		case <-broker: | ||||||
|  | 			// waiting for signal triggered by m.broadcast() and trying again. | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // RTryLockWithContext attempts to acquire the read lock, blocking until resources | ||||||
|  | // are available or ctx is done (timeout or cancellation). | ||||||
|  | func (m *CASMutex) RTryLockWithContext(ctx context.Context) bool { | ||||||
|  | 	if err := m.turnstile.Acquire(ctx, 1); err != nil { | ||||||
|  | 		// Acquire failed due to timeout or cancellation | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	m.turnstile.Release(1) | ||||||
|  |  | ||||||
|  | 	return m.rTryLock(ctx) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // RLock acquires the read lock. | ||||||
|  | // If it is currently held by others writing, RLock will wait until it has a chance to acquire it. | ||||||
|  | func (m *CASMutex) RLock() { | ||||||
|  | 	ctx := context.Background() | ||||||
|  |  | ||||||
|  | 	m.RTryLockWithContext(ctx) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // RTryLock attempts to acquire the read lock without blocking. | ||||||
|  | // Return false if someone is writing it now. | ||||||
|  | func (m *CASMutex) RTryLock() bool { | ||||||
|  | 	if !m.turnstile.TryAcquire(1) { | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	m.turnstile.Release(1) | ||||||
|  |  | ||||||
|  | 	return m.rTryLock(nil) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // RTryLockWithTimeout attempts to acquire the read lock within a period of time. | ||||||
|  | // Return false if spending time is more than duration and no chance to acquire it. | ||||||
|  | func (m *CASMutex) RTryLockWithTimeout(duration time.Duration) bool { | ||||||
|  | 	ctx, cancel := context.WithTimeout(context.Background(), duration) | ||||||
|  | 	defer cancel() | ||||||
|  |  | ||||||
|  | 	return m.RTryLockWithContext(ctx) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // RUnlock releases the read lock. | ||||||
|  | func (m *CASMutex) RUnlock() { | ||||||
|  | 	n := atomic.AddInt32((*int32)(unsafe.Pointer(&m.state)), -1) | ||||||
|  | 	switch m.getState(n) { | ||||||
|  | 	case casStateUndefined, casStateWriteLock: | ||||||
|  | 		panic("RUnlock failed") | ||||||
|  | 	case casStateNoLock: | ||||||
|  | 		m.broadcast() | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // RLocker returns a Locker interface that implements the Lock and Unlock methods | ||||||
|  | // by calling CASMutex.RLock and CASMutex.RUnlock. | ||||||
|  | func (m *CASMutex) RLocker() sync.Locker { | ||||||
|  | 	return (*rlocker)(m) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type rlocker CASMutex | ||||||
|  |  | ||||||
|  | func (r *rlocker) Lock()   { (*CASMutex)(r).RLock() } | ||||||
|  | func (r *rlocker) Unlock() { (*CASMutex)(r).RUnlock() } | ||||||
							
								
								
									
										59
									
								
								dataext/optional.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										59
									
								
								dataext/optional.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,59 @@ | |||||||
|  | package dataext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type JsonOpt[T any] struct { | ||||||
|  | 	isSet bool | ||||||
|  | 	value T | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // MarshalJSON returns m as the JSON encoding of m. | ||||||
|  | func (m JsonOpt[T]) MarshalJSON() ([]byte, error) { | ||||||
|  | 	if !m.isSet { | ||||||
|  | 		return []byte("null"), nil // actually this would be undefined - but undefined is not valid JSON | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return json.Marshal(m.value) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // UnmarshalJSON sets *m to a copy of data. | ||||||
|  | func (m *JsonOpt[T]) UnmarshalJSON(data []byte) error { | ||||||
|  | 	if m == nil { | ||||||
|  | 		return errors.New("JsonOpt: UnmarshalJSON on nil pointer") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	m.isSet = true | ||||||
|  | 	return json.Unmarshal(data, &m.value) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (m JsonOpt[T]) IsSet() bool { | ||||||
|  | 	return m.isSet | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (m JsonOpt[T]) IsUnset() bool { | ||||||
|  | 	return !m.isSet | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (m JsonOpt[T]) Value() (T, bool) { | ||||||
|  | 	if !m.isSet { | ||||||
|  | 		return *new(T), false | ||||||
|  | 	} | ||||||
|  | 	return m.value, true | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (m JsonOpt[T]) ValueOrNil() *T { | ||||||
|  | 	if !m.isSet { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	return &m.value | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (m JsonOpt[T]) MustValue() T { | ||||||
|  | 	if !m.isSet { | ||||||
|  | 		panic("value not set") | ||||||
|  | 	} | ||||||
|  | 	return m.value | ||||||
|  | } | ||||||
| @@ -35,6 +35,23 @@ func (s *SyncMap[TKey, TData]) SetIfNotContains(key TKey, data TData) bool { | |||||||
| 	return true | 	return true | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (s *SyncMap[TKey, TData]) SetIfNotContainsFunc(key TKey, data func() TData) bool { | ||||||
|  | 	s.lock.Lock() | ||||||
|  | 	defer s.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	if s.data == nil { | ||||||
|  | 		s.data = make(map[TKey]TData) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if _, existsInPreState := s.data[key]; existsInPreState { | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	s.data[key] = data() | ||||||
|  |  | ||||||
|  | 	return true | ||||||
|  | } | ||||||
|  |  | ||||||
| func (s *SyncMap[TKey, TData]) Get(key TKey) (TData, bool) { | func (s *SyncMap[TKey, TData]) Get(key TKey) (TData, bool) { | ||||||
| 	s.lock.Lock() | 	s.lock.Lock() | ||||||
| 	defer s.lock.Unlock() | 	defer s.lock.Unlock() | ||||||
| @@ -50,6 +67,39 @@ func (s *SyncMap[TKey, TData]) Get(key TKey) (TData, bool) { | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (s *SyncMap[TKey, TData]) GetAndSetIfNotContains(key TKey, data TData) TData { | ||||||
|  | 	s.lock.Lock() | ||||||
|  | 	defer s.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	if s.data == nil { | ||||||
|  | 		s.data = make(map[TKey]TData) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if v, ok := s.data[key]; ok { | ||||||
|  | 		return v | ||||||
|  | 	} else { | ||||||
|  | 		s.data[key] = data | ||||||
|  | 		return data | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (s *SyncMap[TKey, TData]) GetAndSetIfNotContainsFunc(key TKey, data func() TData) TData { | ||||||
|  | 	s.lock.Lock() | ||||||
|  | 	defer s.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	if s.data == nil { | ||||||
|  | 		s.data = make(map[TKey]TData) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if v, ok := s.data[key]; ok { | ||||||
|  | 		return v | ||||||
|  | 	} else { | ||||||
|  | 		dataObj := data() | ||||||
|  | 		s.data[key] = dataObj | ||||||
|  | 		return dataObj | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
| func (s *SyncMap[TKey, TData]) Delete(key TKey) bool { | func (s *SyncMap[TKey, TData]) Delete(key TKey) bool { | ||||||
| 	s.lock.Lock() | 	s.lock.Lock() | ||||||
| 	defer s.lock.Unlock() | 	defer s.lock.Unlock() | ||||||
|   | |||||||
| @@ -5,6 +5,8 @@ type Enum interface { | |||||||
| 	ValuesAny() []any | 	ValuesAny() []any | ||||||
| 	ValuesMeta() []EnumMetaValue | 	ValuesMeta() []EnumMetaValue | ||||||
| 	VarName() string | 	VarName() string | ||||||
|  | 	TypeName() string | ||||||
|  | 	PackageName() string | ||||||
| } | } | ||||||
|  |  | ||||||
| type StringEnum interface { | type StringEnum interface { | ||||||
|   | |||||||
| @@ -71,6 +71,7 @@ type Builder struct { | |||||||
| 	wrappedErr          error | 	wrappedErr          error | ||||||
| 	errorData           *ExErr | 	errorData           *ExErr | ||||||
| 	containsGinData     bool | 	containsGinData     bool | ||||||
|  | 	containsContextData bool | ||||||
| 	noLog               bool | 	noLog               bool | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -309,27 +310,27 @@ func (b *Builder) Errs(key string, val []error) *Builder { | |||||||
| func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) *Builder { | func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) *Builder { | ||||||
| 	if v := ctx.Value("start_timestamp"); v != nil { | 	if v := ctx.Value("start_timestamp"); v != nil { | ||||||
| 		if t, ok := v.(time.Time); ok { | 		if t, ok := v.(time.Time); ok { | ||||||
| 			b.Time("ctx.startTimestamp", t) | 			b.Time("ctx_startTimestamp", t) | ||||||
| 			b.Time("ctx.endTimestamp", time.Now()) | 			b.Time("ctx_endTimestamp", time.Now()) | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| 	b.Str("gin.method", req.Method) | 	b.Str("gin_method", req.Method) | ||||||
| 	b.Str("gin.path", g.FullPath()) | 	b.Str("gin_path", g.FullPath()) | ||||||
| 	b.Strs("gin.header", extractHeader(g.Request.Header)) | 	b.Strs("gin_header", extractHeader(g.Request.Header)) | ||||||
| 	if req.URL != nil { | 	if req.URL != nil { | ||||||
| 		b.Str("gin.url", req.URL.String()) | 		b.Str("gin_url", req.URL.String()) | ||||||
| 	} | 	} | ||||||
| 	if ctxVal := g.GetString("apiversion"); ctxVal != "" { | 	if ctxVal := g.GetString("apiversion"); ctxVal != "" { | ||||||
| 		b.Str("gin.context.apiversion", ctxVal) | 		b.Str("gin_context_apiversion", ctxVal) | ||||||
| 	} | 	} | ||||||
| 	if ctxVal := g.GetString("uid"); ctxVal != "" { | 	if ctxVal := g.GetString("uid"); ctxVal != "" { | ||||||
| 		b.Str("gin.context.uid", ctxVal) | 		b.Str("gin_context_uid", ctxVal) | ||||||
| 	} | 	} | ||||||
| 	if ctxVal := g.GetString("fcmId"); ctxVal != "" { | 	if ctxVal := g.GetString("fcmId"); ctxVal != "" { | ||||||
| 		b.Str("gin.context.fcmid", ctxVal) | 		b.Str("gin_context_fcmid", ctxVal) | ||||||
| 	} | 	} | ||||||
| 	if ctxVal := g.GetString("reqid"); ctxVal != "" { | 	if ctxVal := g.GetString("reqid"); ctxVal != "" { | ||||||
| 		b.Str("gin.context.reqid", ctxVal) | 		b.Str("gin_context_reqid", ctxVal) | ||||||
| 	} | 	} | ||||||
| 	if req.Method != "GET" && req.Body != nil { | 	if req.Method != "GET" && req.Body != nil { | ||||||
|  |  | ||||||
| @@ -340,12 +341,12 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) | |||||||
| 						var prettyJSON bytes.Buffer | 						var prettyJSON bytes.Buffer | ||||||
| 						err = json.Indent(&prettyJSON, bin, "", "  ") | 						err = json.Indent(&prettyJSON, bin, "", "  ") | ||||||
| 						if err == nil { | 						if err == nil { | ||||||
| 							b.Str("gin.body", string(prettyJSON.Bytes())) | 							b.Str("gin_body", string(prettyJSON.Bytes())) | ||||||
| 						} else { | 						} else { | ||||||
| 							b.Bytes("gin.body", bin) | 							b.Bytes("gin_body", bin) | ||||||
| 						} | 						} | ||||||
| 					} else { | 					} else { | ||||||
| 						b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type"))) | 						b.Str("gin_body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type"))) | ||||||
| 					} | 					} | ||||||
| 				} | 				} | ||||||
| 			} | 			} | ||||||
| @@ -355,9 +356,9 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) | |||||||
| 			if brc, ok := req.Body.(dataext.BufferedReadCloser); ok { | 			if brc, ok := req.Body.(dataext.BufferedReadCloser); ok { | ||||||
| 				if bin, err := brc.BufferedAll(); err == nil { | 				if bin, err := brc.BufferedAll(); err == nil { | ||||||
| 					if len(bin) < 16*1024 { | 					if len(bin) < 16*1024 { | ||||||
| 						b.Bytes("gin.body", bin) | 						b.Bytes("gin_body", bin) | ||||||
| 					} else { | 					} else { | ||||||
| 						b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type"))) | 						b.Str("gin_body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type"))) | ||||||
| 					} | 					} | ||||||
| 				} | 				} | ||||||
| 			} | 			} | ||||||
| @@ -365,10 +366,20 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) | |||||||
|  |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	pkgconfig.ExtendGinMeta(ctx, b, g, req) | ||||||
|  |  | ||||||
| 	b.containsGinData = true | 	b.containsGinData = true | ||||||
| 	return b | 	return b | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (b *Builder) CtxData(method Method, ctx context.Context) *Builder { | ||||||
|  | 	pkgconfig.ExtendContextMeta(b, method, ctx) | ||||||
|  |  | ||||||
|  | 	b.containsContextData = true | ||||||
|  |  | ||||||
|  | 	return b | ||||||
|  | } | ||||||
|  |  | ||||||
| func formatHeader(header map[string][]string) string { | func formatHeader(header map[string][]string) string { | ||||||
| 	ml := 1 | 	ml := 1 | ||||||
| 	for k, _ := range header { | 	for k, _ := range header { | ||||||
| @@ -412,9 +423,13 @@ func extractHeader(header map[string][]string) []string { | |||||||
| // If the errors is not SevWarn or SevInfo it gets also logged (in short form, without stacktrace) onto stdout | // If the errors is not SevWarn or SevInfo it gets also logged (in short form, without stacktrace) onto stdout | ||||||
| // Can be gloablly configured with ZeroLogErrTraces and ZeroLogAllTraces | // Can be gloablly configured with ZeroLogErrTraces and ZeroLogAllTraces | ||||||
| // Can be locally suppressed with Builder.NoLog() | // Can be locally suppressed with Builder.NoLog() | ||||||
| func (b *Builder) Build() error { | func (b *Builder) Build(ctxs ...context.Context) error { | ||||||
| 	warnOnPkgConfigNotInitialized() | 	warnOnPkgConfigNotInitialized() | ||||||
|  |  | ||||||
|  | 	for _, dctx := range ctxs { | ||||||
|  | 		b.CtxData(MethodBuild, dctx) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	if pkgconfig.DisableErrorWrapping && b.wrappedErr != nil { | 	if pkgconfig.DisableErrorWrapping && b.wrappedErr != nil { | ||||||
| 		return b.wrappedErr | 		return b.wrappedErr | ||||||
| 	} | 	} | ||||||
| @@ -439,11 +454,13 @@ func (b *Builder) Output(ctx context.Context, g *gin.Context) { | |||||||
| 		b.GinReq(ctx, g, g.Request) | 		b.GinReq(ctx, g, g.Request) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	b.CtxData(MethodOutput, ctx) | ||||||
|  |  | ||||||
| 	b.errorData.Output(g) | 	b.errorData.Output(g) | ||||||
|  |  | ||||||
| 	if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal { | 	if (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) && (pkgconfig.ZeroLogErrGinOutput || pkgconfig.ZeroLogAllGinOutput) { | ||||||
| 		b.errorData.Log(stackSkipLogger.Error()) | 		b.errorData.Log(stackSkipLogger.Error()) | ||||||
| 	} else if b.errorData.Severity == SevWarn { | 	} else if (b.errorData.Severity == SevWarn) && (pkgconfig.ZeroLogAllGinOutput) { | ||||||
| 		b.errorData.Log(stackSkipLogger.Warn()) | 		b.errorData.Log(stackSkipLogger.Warn()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -452,7 +469,13 @@ func (b *Builder) Output(ctx context.Context, g *gin.Context) { | |||||||
|  |  | ||||||
| // Print prints the error | // Print prints the error | ||||||
| // If the error is SevErr we also send it to the error-service | // If the error is SevErr we also send it to the error-service | ||||||
| func (b *Builder) Print() { | func (b *Builder) Print(ctxs ...context.Context) { | ||||||
|  | 	warnOnPkgConfigNotInitialized() | ||||||
|  |  | ||||||
|  | 	for _, dctx := range ctxs { | ||||||
|  | 		b.CtxData(MethodPrint, dctx) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal { | 	if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal { | ||||||
| 		b.errorData.Log(stackSkipLogger.Error()) | 		b.errorData.Log(stackSkipLogger.Error()) | ||||||
| 	} else if b.errorData.Severity == SevWarn { | 	} else if b.errorData.Severity == SevWarn { | ||||||
| @@ -468,7 +491,12 @@ func (b *Builder) Format(level LogPrintLevel) string { | |||||||
|  |  | ||||||
| // Fatal prints the error and terminates the program | // Fatal prints the error and terminates the program | ||||||
| // If the error is SevErr we also send it to the error-service | // If the error is SevErr we also send it to the error-service | ||||||
| func (b *Builder) Fatal() { | func (b *Builder) Fatal(ctxs ...context.Context) { | ||||||
|  |  | ||||||
|  | 	for _, dctx := range ctxs { | ||||||
|  | 		b.CtxData(MethodFatal, dctx) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	b.errorData.Severity = SevFatal | 	b.errorData.Severity = SevFatal | ||||||
| 	b.errorData.Log(stackSkipLogger.WithLevel(zerolog.FatalLevel)) | 	b.errorData.Log(stackSkipLogger.WithLevel(zerolog.FatalLevel)) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -181,7 +181,7 @@ func getReflectedMetaValues(value interface{}, remainingDepth int) map[string]Me | |||||||
|  |  | ||||||
| 	jsonval, err := json.Marshal(value) | 	jsonval, err := json.Marshal(value) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		panic(err) // gets recovered later up | 		return map[string]MetaValue{"": {DataType: MDTString, Value: fmt.Sprintf("Failed to Marshal %T:\n%+v", value, value)}} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return map[string]MetaValue{"": {DataType: MDTString, Value: string(jsonval)}} | 	return map[string]MetaValue{"": {DataType: MDTString, Value: string(jsonval)}} | ||||||
|   | |||||||
| @@ -1,91 +1,14 @@ | |||||||
| package exerr | package exerr | ||||||
|  |  | ||||||
| import ( | type Method string | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/dataext" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | const ( | ||||||
|  | 	MethodOutput Method = "OUTPUT" | ||||||
|  | 	MethodPrint  Method = "PRINT" | ||||||
|  | 	MethodBuild  Method = "BUILD" | ||||||
|  | 	MethodFatal  Method = "FATAL" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type ErrorCategory struct{ Category string } |  | ||||||
|  |  | ||||||
| var ( |  | ||||||
| 	CatWrap    = ErrorCategory{"Wrap"}    // The error is simply wrapping another error (e.g. when a grpc call returns an error) |  | ||||||
| 	CatSystem  = ErrorCategory{"System"}  // An internal system error (e.g. connection to db failed) |  | ||||||
| 	CatUser    = ErrorCategory{"User"}    // The user (the API caller) did something wrong (e.g. he has no permissions to do this) |  | ||||||
| 	CatForeign = ErrorCategory{"Foreign"} // A foreign error that some component threw (e.g. an unknown mongodb error), happens if we call Wrap(..) on an non-bmerror value |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| //goland:noinspection GoUnusedGlobalVariable |  | ||||||
| var AllCategories = []ErrorCategory{CatWrap, CatSystem, CatUser, CatForeign} |  | ||||||
|  |  | ||||||
| type ErrorSeverity struct{ Severity string } |  | ||||||
|  |  | ||||||
| var ( |  | ||||||
| 	SevTrace = ErrorSeverity{"Trace"} |  | ||||||
| 	SevDebug = ErrorSeverity{"Debug"} |  | ||||||
| 	SevInfo  = ErrorSeverity{"Info"} |  | ||||||
| 	SevWarn  = ErrorSeverity{"Warn"} |  | ||||||
| 	SevErr   = ErrorSeverity{"Err"} |  | ||||||
| 	SevFatal = ErrorSeverity{"Fatal"} |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| //goland:noinspection GoUnusedGlobalVariable |  | ||||||
| var AllSeverities = []ErrorSeverity{SevTrace, SevDebug, SevInfo, SevWarn, SevErr, SevFatal} |  | ||||||
|  |  | ||||||
| type ErrorType struct { |  | ||||||
| 	Key               string |  | ||||||
| 	DefaultStatusCode *int |  | ||||||
| } |  | ||||||
|  |  | ||||||
| //goland:noinspection GoUnusedGlobalVariable |  | ||||||
| var ( |  | ||||||
| 	TypeInternal       = NewType("INTERNAL_ERROR", langext.Ptr(500)) |  | ||||||
| 	TypePanic          = NewType("PANIC", langext.Ptr(500)) |  | ||||||
| 	TypeNotImplemented = NewType("NOT_IMPLEMENTED", langext.Ptr(500)) |  | ||||||
|  |  | ||||||
| 	TypeMongoQuery        = NewType("MONGO_QUERY", langext.Ptr(500)) |  | ||||||
| 	TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500)) |  | ||||||
| 	TypeMongoFilter       = NewType("MONGO_FILTER", langext.Ptr(500)) |  | ||||||
| 	TypeMongoReflection   = NewType("MONGO_REFLECTION", langext.Ptr(500)) |  | ||||||
| 	TypeMongoInvalidOpt   = NewType("MONGO_INVALIDOPT", langext.Ptr(500)) |  | ||||||
|  |  | ||||||
| 	TypeSQLQuery  = NewType("SQL_QUERY", langext.Ptr(500)) |  | ||||||
| 	TypeSQLBuild  = NewType("SQL_BUILD", langext.Ptr(500)) |  | ||||||
| 	TypeSQLDecode = NewType("SQL_DECODE", langext.Ptr(500)) |  | ||||||
|  |  | ||||||
| 	TypeWrap = NewType("Wrap", nil) |  | ||||||
|  |  | ||||||
| 	TypeBindFailURI      = NewType("BINDFAIL_URI", langext.Ptr(400)) |  | ||||||
| 	TypeBindFailQuery    = NewType("BINDFAIL_QUERY", langext.Ptr(400)) |  | ||||||
| 	TypeBindFailJSON     = NewType("BINDFAIL_JSON", langext.Ptr(400)) |  | ||||||
| 	TypeBindFailFormData = NewType("BINDFAIL_FORMDATA", langext.Ptr(400)) |  | ||||||
| 	TypeBindFailHeader   = NewType("BINDFAIL_HEADER", langext.Ptr(400)) |  | ||||||
|  |  | ||||||
| 	TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400)) |  | ||||||
| 	TypeInvalidCSID     = NewType("INVALID_CSID", langext.Ptr(400)) |  | ||||||
|  |  | ||||||
| 	TypeGoogleStatuscode = NewType("GOOGLE_STATUSCODE", langext.Ptr(400)) |  | ||||||
| 	TypeGoogleResponse   = NewType("GOOGLE_RESPONSE", langext.Ptr(400)) |  | ||||||
|  |  | ||||||
| 	TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401)) |  | ||||||
| 	TypeAuthFailed   = NewType("AUTH_FAILED", langext.Ptr(401)) |  | ||||||
|  |  | ||||||
| 	// other values come from the downstream application that uses goext |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| var registeredTypes = dataext.SyncMap[string, ErrorType]{} |  | ||||||
|  |  | ||||||
| func NewType(key string, defStatusCode *int) ErrorType { |  | ||||||
| 	et := ErrorType{key, defStatusCode} |  | ||||||
|  |  | ||||||
| 	registeredTypes.Set(key, et) |  | ||||||
|  |  | ||||||
| 	return et |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func ListRegisteredTypes() []ErrorType { |  | ||||||
| 	return registeredTypes.GetAllValues() |  | ||||||
| } |  | ||||||
|  |  | ||||||
| type LogPrintLevel string | type LogPrintLevel string | ||||||
|  |  | ||||||
| const ( | const ( | ||||||
|   | |||||||
							
								
								
									
										89
									
								
								exerr/dataCategory.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										89
									
								
								exerr/dataCategory.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,89 @@ | |||||||
|  | package exerr | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"reflect" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type ErrorCategory struct{ Category string } | ||||||
|  |  | ||||||
|  | var ( | ||||||
|  | 	CatWrap    = ErrorCategory{"Wrap"}    // The error is simply wrapping another error (e.g. when a grpc call returns an error) | ||||||
|  | 	CatSystem  = ErrorCategory{"System"}  // An internal system error (e.g. connection to db failed) | ||||||
|  | 	CatUser    = ErrorCategory{"User"}    // The user (the API caller) did something wrong (e.g. he has no permissions to do this) | ||||||
|  | 	CatForeign = ErrorCategory{"Foreign"} // A foreign error that some component threw (e.g. an unknown mongodb error), happens if we call Wrap(..) on an non-bmerror value | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func (e *ErrorCategory) UnmarshalJSON(bytes []byte) error { | ||||||
|  | 	return json.Unmarshal(bytes, &e.Category) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ErrorCategory) MarshalJSON() ([]byte, error) { | ||||||
|  | 	return json.Marshal(e.Category) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e *ErrorCategory) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*e = ErrorCategory{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeString { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into String", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt string | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*e = ErrorCategory{tt} | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ErrorCategory) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	return bson.MarshalValue(e.Category) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ErrorCategory) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { | ||||||
|  | 	if val.Kind() == reflect.Ptr && val.IsNil() { | ||||||
|  | 		if !val.CanSet() { | ||||||
|  | 			return errors.New("ValueUnmarshalerDecodeValue") | ||||||
|  | 		} | ||||||
|  | 		val.Set(reflect.New(val.Type().Elem())) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr && len(src) == 0 { | ||||||
|  | 		val.Set(reflect.Zero(val.Type())) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = e.UnmarshalBSONValue(tp, src) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr { | ||||||
|  | 		val.Set(reflect.ValueOf(&e)) | ||||||
|  | 	} else { | ||||||
|  | 		val.Set(reflect.ValueOf(e)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | //goland:noinspection GoUnusedGlobalVariable | ||||||
|  | var AllCategories = []ErrorCategory{CatWrap, CatSystem, CatUser, CatForeign} | ||||||
							
								
								
									
										91
									
								
								exerr/dataSeverity.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										91
									
								
								exerr/dataSeverity.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,91 @@ | |||||||
|  | package exerr | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"reflect" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type ErrorSeverity struct{ Severity string } | ||||||
|  |  | ||||||
|  | var ( | ||||||
|  | 	SevTrace = ErrorSeverity{"Trace"} | ||||||
|  | 	SevDebug = ErrorSeverity{"Debug"} | ||||||
|  | 	SevInfo  = ErrorSeverity{"Info"} | ||||||
|  | 	SevWarn  = ErrorSeverity{"Warn"} | ||||||
|  | 	SevErr   = ErrorSeverity{"Err"} | ||||||
|  | 	SevFatal = ErrorSeverity{"Fatal"} | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func (e *ErrorSeverity) UnmarshalJSON(bytes []byte) error { | ||||||
|  | 	return json.Unmarshal(bytes, &e.Severity) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ErrorSeverity) MarshalJSON() ([]byte, error) { | ||||||
|  | 	return json.Marshal(e.Severity) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e *ErrorSeverity) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*e = ErrorSeverity{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeString { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into String", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt string | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*e = ErrorSeverity{tt} | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ErrorSeverity) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	return bson.MarshalValue(e.Severity) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ErrorSeverity) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { | ||||||
|  | 	if val.Kind() == reflect.Ptr && val.IsNil() { | ||||||
|  | 		if !val.CanSet() { | ||||||
|  | 			return errors.New("ValueUnmarshalerDecodeValue") | ||||||
|  | 		} | ||||||
|  | 		val.Set(reflect.New(val.Type().Elem())) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr && len(src) == 0 { | ||||||
|  | 		val.Set(reflect.Zero(val.Type())) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = e.UnmarshalBSONValue(tp, src) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr { | ||||||
|  | 		val.Set(reflect.ValueOf(&e)) | ||||||
|  | 	} else { | ||||||
|  | 		val.Set(reflect.ValueOf(e)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | //goland:noinspection GoUnusedGlobalVariable | ||||||
|  | var AllSeverities = []ErrorSeverity{SevTrace, SevDebug, SevInfo, SevWarn, SevErr, SevFatal} | ||||||
							
								
								
									
										155
									
								
								exerr/dataType.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										155
									
								
								exerr/dataType.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,155 @@ | |||||||
|  | package exerr | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/dataext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"reflect" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type ErrorType struct { | ||||||
|  | 	Key               string | ||||||
|  | 	DefaultStatusCode *int | ||||||
|  | } | ||||||
|  |  | ||||||
|  | //goland:noinspection GoUnusedGlobalVariable | ||||||
|  | var ( | ||||||
|  | 	TypeInternal       = NewType("INTERNAL_ERROR", langext.Ptr(500)) | ||||||
|  | 	TypePanic          = NewType("PANIC", langext.Ptr(500)) | ||||||
|  | 	TypeNotImplemented = NewType("NOT_IMPLEMENTED", langext.Ptr(500)) | ||||||
|  |  | ||||||
|  | 	TypeMongoQuery        = NewType("MONGO_QUERY", langext.Ptr(500)) | ||||||
|  | 	TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500)) | ||||||
|  | 	TypeMongoFilter       = NewType("MONGO_FILTER", langext.Ptr(500)) | ||||||
|  | 	TypeMongoReflection   = NewType("MONGO_REFLECTION", langext.Ptr(500)) | ||||||
|  | 	TypeMongoInvalidOpt   = NewType("MONGO_INVALIDOPT", langext.Ptr(500)) | ||||||
|  |  | ||||||
|  | 	TypeSQLQuery  = NewType("SQL_QUERY", langext.Ptr(500)) | ||||||
|  | 	TypeSQLBuild  = NewType("SQL_BUILD", langext.Ptr(500)) | ||||||
|  | 	TypeSQLDecode = NewType("SQL_DECODE", langext.Ptr(500)) | ||||||
|  |  | ||||||
|  | 	TypeWrap = NewType("Wrap", nil) | ||||||
|  |  | ||||||
|  | 	TypeBindFailURI      = NewType("BINDFAIL_URI", langext.Ptr(400)) | ||||||
|  | 	TypeBindFailQuery    = NewType("BINDFAIL_QUERY", langext.Ptr(400)) | ||||||
|  | 	TypeBindFailJSON     = NewType("BINDFAIL_JSON", langext.Ptr(400)) | ||||||
|  | 	TypeBindFailFormData = NewType("BINDFAIL_FORMDATA", langext.Ptr(400)) | ||||||
|  | 	TypeBindFailHeader   = NewType("BINDFAIL_HEADER", langext.Ptr(400)) | ||||||
|  |  | ||||||
|  | 	TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400)) | ||||||
|  | 	TypeInvalidCSID     = NewType("INVALID_CSID", langext.Ptr(400)) | ||||||
|  |  | ||||||
|  | 	TypeGoogleStatuscode = NewType("GOOGLE_STATUSCODE", langext.Ptr(400)) | ||||||
|  | 	TypeGoogleResponse   = NewType("GOOGLE_RESPONSE", langext.Ptr(400)) | ||||||
|  |  | ||||||
|  | 	TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401)) | ||||||
|  | 	TypeAuthFailed   = NewType("AUTH_FAILED", langext.Ptr(401)) | ||||||
|  |  | ||||||
|  | 	TypeInvalidImage    = NewType("IMAGEEXT_INVALID_IMAGE", langext.Ptr(400)) | ||||||
|  | 	TypeInvalidMimeType = NewType("IMAGEEXT_INVALID_MIMETYPE", langext.Ptr(400)) | ||||||
|  |  | ||||||
|  | 	// other values come from the downstream application that uses goext | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func (e *ErrorType) UnmarshalJSON(bytes []byte) error { | ||||||
|  | 	var k string | ||||||
|  | 	err := json.Unmarshal(bytes, &k) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if d, ok := registeredTypes.Get(k); ok { | ||||||
|  | 		*e = d | ||||||
|  | 		return nil | ||||||
|  | 	} else { | ||||||
|  | 		*e = ErrorType{k, nil} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ErrorType) MarshalJSON() ([]byte, error) { | ||||||
|  | 	return json.Marshal(e.Key) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e *ErrorType) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*e = ErrorType{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeString { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into String", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt string | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if d, ok := registeredTypes.Get(tt); ok { | ||||||
|  | 		*e = d | ||||||
|  | 		return nil | ||||||
|  | 	} else { | ||||||
|  | 		*e = ErrorType{tt, nil} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ErrorType) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	return bson.MarshalValue(e.Key) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ErrorType) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { | ||||||
|  | 	if val.Kind() == reflect.Ptr && val.IsNil() { | ||||||
|  | 		if !val.CanSet() { | ||||||
|  | 			return errors.New("ValueUnmarshalerDecodeValue") | ||||||
|  | 		} | ||||||
|  | 		val.Set(reflect.New(val.Type().Elem())) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr && len(src) == 0 { | ||||||
|  | 		val.Set(reflect.Zero(val.Type())) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = e.UnmarshalBSONValue(tp, src) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr { | ||||||
|  | 		val.Set(reflect.ValueOf(&e)) | ||||||
|  | 	} else { | ||||||
|  | 		val.Set(reflect.ValueOf(e)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | var registeredTypes = dataext.SyncMap[string, ErrorType]{} | ||||||
|  |  | ||||||
|  | func NewType(key string, defStatusCode *int) ErrorType { | ||||||
|  | 	et := ErrorType{key, defStatusCode} | ||||||
|  |  | ||||||
|  | 	registeredTypes.Set(key, et) | ||||||
|  |  | ||||||
|  | 	return et | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ListRegisteredTypes() []ErrorType { | ||||||
|  | 	return registeredTypes.GetAllValues() | ||||||
|  | } | ||||||
							
								
								
									
										153
									
								
								exerr/data_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										153
									
								
								exerr/data_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,153 @@ | |||||||
|  | package exerr | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/primitive" | ||||||
|  | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"testing" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func TestJSONMarshalErrorCategory(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	c1 := CatSystem | ||||||
|  |  | ||||||
|  | 	jsonbin := tst.Must(json.Marshal(c1))(t) | ||||||
|  |  | ||||||
|  | 	var c2 ErrorCategory | ||||||
|  | 	tst.AssertNoErr(t, json.Unmarshal(jsonbin, &c2)) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, c1, c2) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, string(jsonbin), "\"System\"") | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestJSONMarshalErrorSeverity(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	c1 := SevErr | ||||||
|  |  | ||||||
|  | 	jsonbin := tst.Must(json.Marshal(c1))(t) | ||||||
|  |  | ||||||
|  | 	var c2 ErrorSeverity | ||||||
|  | 	tst.AssertNoErr(t, json.Unmarshal(jsonbin, &c2)) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, c1, c2) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, string(jsonbin), "\"Err\"") | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestJSONMarshalErrorType(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	c1 := TypeNotImplemented | ||||||
|  |  | ||||||
|  | 	jsonbin := tst.Must(json.Marshal(c1))(t) | ||||||
|  |  | ||||||
|  | 	var c2 ErrorType | ||||||
|  | 	tst.AssertNoErr(t, json.Unmarshal(jsonbin, &c2)) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, c1, c2) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, string(jsonbin), "\"NOT_IMPLEMENTED\"") | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestBSONMarshalErrorCategory(t *testing.T) { | ||||||
|  | 	ctx, cancel := context.WithTimeout(context.Background(), 350*time.Millisecond) | ||||||
|  | 	defer cancel() | ||||||
|  |  | ||||||
|  | 	client, err := mongo.Connect(ctx) | ||||||
|  | 	if err != nil { | ||||||
|  | 		t.Skip("Skip test - no local mongo found") | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	err = client.Ping(ctx, nil) | ||||||
|  | 	if err != nil { | ||||||
|  | 		t.Skip("Skip test - no local mongo found") | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	primimd := primitive.NewObjectID() | ||||||
|  |  | ||||||
|  | 	_, err = client.Database("_test").Collection("goext-cicd").InsertOne(ctx, bson.M{"_id": primimd, "val": CatSystem}) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	cursor := client.Database("_test").Collection("goext-cicd").FindOne(ctx, bson.M{"_id": primimd, "val": bson.M{"$type": "string"}}) | ||||||
|  |  | ||||||
|  | 	var c1 struct { | ||||||
|  | 		ID  primitive.ObjectID `bson:"_id"` | ||||||
|  | 		Val ErrorCategory      `bson:"val"` | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = cursor.Decode(&c1) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, c1.Val, CatSystem) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestBSONMarshalErrorSeverity(t *testing.T) { | ||||||
|  | 	ctx, cancel := context.WithTimeout(context.Background(), 350*time.Millisecond) | ||||||
|  | 	defer cancel() | ||||||
|  |  | ||||||
|  | 	client, err := mongo.Connect(ctx) | ||||||
|  | 	if err != nil { | ||||||
|  | 		t.Skip("Skip test - no local mongo found") | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	err = client.Ping(ctx, nil) | ||||||
|  | 	if err != nil { | ||||||
|  | 		t.Skip("Skip test - no local mongo found") | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	primimd := primitive.NewObjectID() | ||||||
|  |  | ||||||
|  | 	_, err = client.Database("_test").Collection("goext-cicd").InsertOne(ctx, bson.M{"_id": primimd, "val": SevErr}) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	cursor := client.Database("_test").Collection("goext-cicd").FindOne(ctx, bson.M{"_id": primimd, "val": bson.M{"$type": "string"}}) | ||||||
|  |  | ||||||
|  | 	var c1 struct { | ||||||
|  | 		ID  primitive.ObjectID `bson:"_id"` | ||||||
|  | 		Val ErrorSeverity      `bson:"val"` | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = cursor.Decode(&c1) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, c1.Val, SevErr) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestBSONMarshalErrorType(t *testing.T) { | ||||||
|  | 	ctx, cancel := context.WithTimeout(context.Background(), 350*time.Millisecond) | ||||||
|  | 	defer cancel() | ||||||
|  |  | ||||||
|  | 	client, err := mongo.Connect(ctx) | ||||||
|  | 	if err != nil { | ||||||
|  | 		t.Skip("Skip test - no local mongo found") | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	err = client.Ping(ctx, nil) | ||||||
|  | 	if err != nil { | ||||||
|  | 		t.Skip("Skip test - no local mongo found") | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	primimd := primitive.NewObjectID() | ||||||
|  |  | ||||||
|  | 	_, err = client.Database("_test").Collection("goext-cicd").InsertOne(ctx, bson.M{"_id": primimd, "val": TypeNotImplemented}) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	cursor := client.Database("_test").Collection("goext-cicd").FindOne(ctx, bson.M{"_id": primimd, "val": bson.M{"$type": "string"}}) | ||||||
|  |  | ||||||
|  | 	var c1 struct { | ||||||
|  | 		ID  primitive.ObjectID `bson:"_id"` | ||||||
|  | 		Val ErrorType          `bson:"val"` | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = cursor.Decode(&c1) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, c1.Val, TypeNotImplemented) | ||||||
|  | } | ||||||
| @@ -1,8 +1,11 @@ | |||||||
| package exerr | package exerr | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"context" | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"net/http" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type ErrorPackageConfig struct { | type ErrorPackageConfig struct { | ||||||
| @@ -14,6 +17,10 @@ type ErrorPackageConfig struct { | |||||||
| 	ExtendGinOutput        func(err *ExErr, json map[string]any)                                    // (Optionally) extend the gin output with more fields | 	ExtendGinOutput        func(err *ExErr, json map[string]any)                                    // (Optionally) extend the gin output with more fields | ||||||
| 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any)                         // (Optionally) extend the gin `__data` output with more fields | 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any)                         // (Optionally) extend the gin `__data` output with more fields | ||||||
| 	DisableErrorWrapping   bool                                                                     // Disables the exerr.Wrap()...Build() function - will always return the original error | 	DisableErrorWrapping   bool                                                                     // Disables the exerr.Wrap()...Build() function - will always return the original error | ||||||
|  | 	ZeroLogErrGinOutput    bool                                                                     // automatically print zerolog logs on ginext.Error() / .Output(gin)  (for SevErr and SevFatal)
|  | 	ZeroLogAllGinOutput    bool                                                                     // automatically print zerolog logs on ginext.Error() / .Output(gin)  (for all Severities)
|  | 	ExtendGinMeta          func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) // (Optionally) extend the final error meta values with additional data from the gin context (a few are automatically added, here more can be included) | ||||||
|  | 	ExtendContextMeta      func(b *Builder, method Method, dctx context.Context)                    // (Optionally) extend the final error meta values with additional data from the context (a few are automatically added, here more can be included) | ||||||
| } | } | ||||||
|  |  | ||||||
| type ErrorPackageConfigInit struct { | type ErrorPackageConfigInit struct { | ||||||
| @@ -25,6 +32,10 @@ type ErrorPackageConfigInit struct { | |||||||
| 	ExtendGinOutput        func(err *ExErr, json map[string]any) | 	ExtendGinOutput        func(err *ExErr, json map[string]any) | ||||||
| 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) | 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) | ||||||
| 	DisableErrorWrapping   *bool | 	DisableErrorWrapping   *bool | ||||||
|  | 	ZeroLogErrGinOutput    *bool | ||||||
|  | 	ZeroLogAllGinOutput    *bool | ||||||
|  | 	ExtendGinMeta          func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) | ||||||
|  | 	ExtendContextMeta      func(b *Builder, method Method, dctx context.Context) | ||||||
| } | } | ||||||
|  |  | ||||||
| var initialized = false | var initialized = false | ||||||
| @@ -38,6 +49,10 @@ var pkgconfig = ErrorPackageConfig{ | |||||||
| 	ExtendGinOutput:        func(err *ExErr, json map[string]any) {}, | 	ExtendGinOutput:        func(err *ExErr, json map[string]any) {}, | ||||||
| 	ExtendGinDataOutput:    func(err *ExErr, depth int, json map[string]any) {}, | 	ExtendGinDataOutput:    func(err *ExErr, depth int, json map[string]any) {}, | ||||||
| 	DisableErrorWrapping:   false, | 	DisableErrorWrapping:   false, | ||||||
|  | 	ZeroLogErrGinOutput:    true, | ||||||
|  | 	ZeroLogAllGinOutput:    false, | ||||||
|  | 	ExtendGinMeta:          func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) {}, | ||||||
|  | 	ExtendContextMeta:      func(b *Builder, method Method, dctx context.Context) {}, | ||||||
| } | } | ||||||
|  |  | ||||||
| // Init initializes the exerr packages | // Init initializes the exerr packages | ||||||
| @@ -50,6 +65,8 @@ func Init(cfg ErrorPackageConfigInit) { | |||||||
|  |  | ||||||
| 	ego := func(err *ExErr, json map[string]any) {} | 	ego := func(err *ExErr, json map[string]any) {} | ||||||
| 	egdo := func(err *ExErr, depth int, json map[string]any) {} | 	egdo := func(err *ExErr, depth int, json map[string]any) {} | ||||||
|  | 	egm := func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) {} | ||||||
|  | 	egcm := func(b *Builder, method Method, dctx context.Context) {} | ||||||
|  |  | ||||||
| 	if cfg.ExtendGinOutput != nil { | 	if cfg.ExtendGinOutput != nil { | ||||||
| 		ego = cfg.ExtendGinOutput | 		ego = cfg.ExtendGinOutput | ||||||
| @@ -57,6 +74,12 @@ func Init(cfg ErrorPackageConfigInit) { | |||||||
| 	if cfg.ExtendGinDataOutput != nil { | 	if cfg.ExtendGinDataOutput != nil { | ||||||
| 		egdo = cfg.ExtendGinDataOutput | 		egdo = cfg.ExtendGinDataOutput | ||||||
| 	} | 	} | ||||||
|  | 	if cfg.ExtendGinMeta != nil { | ||||||
|  | 		egm = cfg.ExtendGinMeta | ||||||
|  | 	} | ||||||
|  | 	if cfg.ExtendContextMeta != nil { | ||||||
|  | 		egcm = cfg.ExtendContextMeta | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	pkgconfig = ErrorPackageConfig{ | 	pkgconfig = ErrorPackageConfig{ | ||||||
| 		ZeroLogErrTraces:       langext.Coalesce(cfg.ZeroLogErrTraces, pkgconfig.ZeroLogErrTraces), | 		ZeroLogErrTraces:       langext.Coalesce(cfg.ZeroLogErrTraces, pkgconfig.ZeroLogErrTraces), | ||||||
| @@ -67,6 +90,10 @@ func Init(cfg ErrorPackageConfigInit) { | |||||||
| 		ExtendGinOutput:        ego, | 		ExtendGinOutput:        ego, | ||||||
| 		ExtendGinDataOutput:    egdo, | 		ExtendGinDataOutput:    egdo, | ||||||
| 		DisableErrorWrapping:   langext.Coalesce(cfg.DisableErrorWrapping, pkgconfig.DisableErrorWrapping), | 		DisableErrorWrapping:   langext.Coalesce(cfg.DisableErrorWrapping, pkgconfig.DisableErrorWrapping), | ||||||
|  | 		ZeroLogAllGinOutput:    langext.Coalesce(cfg.ZeroLogAllGinOutput, pkgconfig.ZeroLogAllGinOutput), | ||||||
|  | 		ZeroLogErrGinOutput:    langext.Coalesce(cfg.ZeroLogErrGinOutput, pkgconfig.ZeroLogErrGinOutput), | ||||||
|  | 		ExtendGinMeta:          egm, | ||||||
|  | 		ExtendContextMeta:      egcm, | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	initialized = true | 	initialized = true | ||||||
|   | |||||||
| @@ -84,6 +84,9 @@ func (ee *ExErr) FormatLog(lvl LogPrintLevel) string { | |||||||
| 	if lvl == LogPrintShort { | 	if lvl == LogPrintShort { | ||||||
|  |  | ||||||
| 		msg := ee.Message | 		msg := ee.Message | ||||||
|  | 		if msg == "" { | ||||||
|  | 			msg = ee.RecursiveMessage() | ||||||
|  | 		} | ||||||
| 		if ee.OriginalError != nil && ee.OriginalError.Category == CatForeign { | 		if ee.OriginalError != nil && ee.OriginalError.Category == CatForeign { | ||||||
| 			msg = msg + " (" + strings.ReplaceAll(ee.OriginalError.Message, "\n", " ") + ")" | 			msg = msg + " (" + strings.ReplaceAll(ee.OriginalError.Message, "\n", " ") + ")" | ||||||
| 		} | 		} | ||||||
|   | |||||||
| @@ -2,10 +2,19 @@ package exerr | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"errors" | 	"errors" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"os" | ||||||
| 	"testing" | 	"testing" | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | func TestMain(m *testing.M) { | ||||||
|  | 	if !Initialized() { | ||||||
|  | 		Init(ErrorPackageConfigInit{ZeroLogErrTraces: langext.PFalse, ZeroLogAllTraces: langext.PFalse}) | ||||||
|  | 	} | ||||||
|  | 	os.Exit(m.Run()) | ||||||
|  | } | ||||||
|  |  | ||||||
| type golangErr struct { | type golangErr struct { | ||||||
| 	Message string | 	Message string | ||||||
| } | } | ||||||
|   | |||||||
| @@ -15,10 +15,10 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la | |||||||
| 		ginJson["id"] = ee.UniqueID | 		ginJson["id"] = ee.UniqueID | ||||||
| 	} | 	} | ||||||
| 	if ee.Category != CatWrap { | 	if ee.Category != CatWrap { | ||||||
| 		ginJson["category"] = ee.Category | 		ginJson["category"] = ee.Category.Category | ||||||
| 	} | 	} | ||||||
| 	if ee.Type != TypeWrap { | 	if ee.Type != TypeWrap { | ||||||
| 		ginJson["type"] = ee.Type | 		ginJson["type"] = ee.Type.Key | ||||||
| 	} | 	} | ||||||
| 	if ee.StatusCode != nil { | 	if ee.StatusCode != nil { | ||||||
| 		ginJson["statuscode"] = ee.StatusCode | 		ginJson["statuscode"] = ee.StatusCode | ||||||
| @@ -30,7 +30,7 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la | |||||||
| 		ginJson["caller"] = ee.Caller | 		ginJson["caller"] = ee.Caller | ||||||
| 	} | 	} | ||||||
| 	if ee.Severity != SevErr { | 	if ee.Severity != SevErr { | ||||||
| 		ginJson["severity"] = ee.Severity | 		ginJson["severity"] = ee.Severity.Severity | ||||||
| 	} | 	} | ||||||
| 	if ee.Timestamp != (time.Time{}) { | 	if ee.Timestamp != (time.Time{}) { | ||||||
| 		ginJson["time"] = ee.Timestamp.Format(time.RFC3339) | 		ginJson["time"] = ee.Timestamp.Format(time.RFC3339) | ||||||
|   | |||||||
| @@ -4,15 +4,6 @@ import ( | |||||||
| 	"sync" | 	"sync" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type Method string |  | ||||||
|  |  | ||||||
| const ( |  | ||||||
| 	MethodOutput Method = "OUTPUT" |  | ||||||
| 	MethodPrint  Method = "PRINT" |  | ||||||
| 	MethodBuild  Method = "BUILD" |  | ||||||
| 	MethodFatal  Method = "FATAL" |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| type Listener = func(method Method, v *ExErr) | type Listener = func(method Method, v *ExErr) | ||||||
|  |  | ||||||
| var listenerLock = sync.Mutex{} | var listenerLock = sync.Mutex{} | ||||||
|   | |||||||
| @@ -19,6 +19,7 @@ type GinWrapper struct { | |||||||
| 	engine          *gin.Engine | 	engine          *gin.Engine | ||||||
| 	suppressGinLogs bool | 	suppressGinLogs bool | ||||||
|  |  | ||||||
|  | 	opt                   Options | ||||||
| 	allowCors             bool | 	allowCors             bool | ||||||
| 	ginDebug              bool | 	ginDebug              bool | ||||||
| 	bufferBody            bool | 	bufferBody            bool | ||||||
| @@ -39,10 +40,13 @@ type ginRouteSpec struct { | |||||||
| type Options struct { | type Options struct { | ||||||
| 	AllowCors                *bool                                     // Add cors handler to allow all CORS requests on the default http methods | 	AllowCors                *bool                                     // Add cors handler to allow all CORS requests on the default http methods | ||||||
| 	GinDebug                 *bool                                     // Set gin.debug to true (adds more logs) | 	GinDebug                 *bool                                     // Set gin.debug to true (adds more logs) | ||||||
|  | 	SuppressGinLogs          *bool                                     // Suppress our custom gin logs (even if GinDebug == true) | ||||||
| 	BufferBody               *bool                                     // Buffers the input body stream, this way the ginext error handler can later include the whole request body | 	BufferBody               *bool                                     // Buffers the input body stream, this way the ginext error handler can later include the whole request body | ||||||
| 	Timeout                  *time.Duration                            // The default handler timeout | 	Timeout                  *time.Duration                            // The default handler timeout | ||||||
| 	ListenerBeforeRequest    []func(g *gin.Context)                    // Register listener that are called before the handler method | 	ListenerBeforeRequest    []func(g *gin.Context)                    // Register listener that are called before the handler method | ||||||
| 	ListenerAfterRequest     []func(g *gin.Context, resp HTTPResponse) // Register listener that are called after the handler method | 	ListenerAfterRequest     []func(g *gin.Context, resp HTTPResponse) // Register listener that are called after the handler method | ||||||
|  | 	DebugTrimHandlerPrefixes []string                                  // Trim these prefixes from the handler names in the debug print | ||||||
|  | 	DebugReplaceHandlerNames map[string]string                         // Replace handler names in debug output | ||||||
| } | } | ||||||
|  |  | ||||||
| // NewEngine creates a new (wrapped) ginEngine | // NewEngine creates a new (wrapped) ginEngine | ||||||
| @@ -51,7 +55,8 @@ func NewEngine(opt Options) *GinWrapper { | |||||||
|  |  | ||||||
| 	wrapper := &GinWrapper{ | 	wrapper := &GinWrapper{ | ||||||
| 		engine:                engine, | 		engine:                engine, | ||||||
| 		suppressGinLogs:       false, | 		opt:                   opt, | ||||||
|  | 		suppressGinLogs:       langext.Coalesce(opt.SuppressGinLogs, false), | ||||||
| 		allowCors:             langext.Coalesce(opt.AllowCors, false), | 		allowCors:             langext.Coalesce(opt.AllowCors, false), | ||||||
| 		ginDebug:              langext.Coalesce(opt.GinDebug, true), | 		ginDebug:              langext.Coalesce(opt.GinDebug, true), | ||||||
| 		bufferBody:            langext.Coalesce(opt.BufferBody, false), | 		bufferBody:            langext.Coalesce(opt.BufferBody, false), | ||||||
| @@ -67,20 +72,22 @@ func NewEngine(opt Options) *GinWrapper { | |||||||
| 		engine.Use(CorsMiddleware()) | 		engine.Use(CorsMiddleware()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	if wrapper.ginDebug { | ||||||
|  | 		gin.SetMode(gin.DebugMode) | ||||||
|  |  | ||||||
| 		// do not debug-print routes | 		// do not debug-print routes | ||||||
| 		gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {} | 		gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {} | ||||||
|  |  | ||||||
| 	if !wrapper.ginDebug { | 		if !wrapper.suppressGinLogs { | ||||||
|  | 			ginlogger := gin.Logger() | ||||||
|  | 			engine.Use(func(context *gin.Context) { ginlogger(context) }) | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
| 		gin.SetMode(gin.ReleaseMode) | 		gin.SetMode(gin.ReleaseMode) | ||||||
|  |  | ||||||
| 		ginlogger := gin.Logger() | 		// do not debug-print routes | ||||||
| 		engine.Use(func(context *gin.Context) { | 		gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {} | ||||||
| 			if !wrapper.suppressGinLogs { |  | ||||||
| 				ginlogger(context) |  | ||||||
| 			} |  | ||||||
| 		}) |  | ||||||
| 	} else { |  | ||||||
| 		gin.SetMode(gin.DebugMode) |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return wrapper | 	return wrapper | ||||||
| @@ -184,6 +191,18 @@ func (w *GinWrapper) cleanMiddlewareName(fname string) string { | |||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	for _, pfx := range w.opt.DebugTrimHandlerPrefixes { | ||||||
|  | 		if strings.HasPrefix(fname, pfx) { | ||||||
|  | 			fname = fname[len(pfx):] | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for k, v := range langext.ForceMap(w.opt.DebugReplaceHandlerNames) { | ||||||
|  | 		if strings.EqualFold(fname, k) { | ||||||
|  | 			fname = v | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	return fname | 	return fname | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -193,3 +212,9 @@ func (w *GinWrapper) ServeHTTP(req *http.Request) *httptest.ResponseRecorder { | |||||||
| 	w.engine.ServeHTTP(respRec, req) | 	w.engine.ServeHTTP(respRec, req) | ||||||
| 	return respRec | 	return respRec | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // ForwardRequest manually inserts a request into this router | ||||||
|  | // = behaves as if the request came from the outside (and writes the response to `writer`) | ||||||
|  | func (w *GinWrapper) ForwardRequest(writer http.ResponseWriter, req *http.Request) { | ||||||
|  | 	w.engine.ServeHTTP(writer, req) | ||||||
|  | } | ||||||
|   | |||||||
| @@ -1,6 +1,7 @@ | |||||||
| package ginext | package ginext | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"context" | ||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| @@ -410,7 +411,8 @@ func (j jsonAPIErrResponse) Write(g *gin.Context) { | |||||||
| 	for _, v := range j.cookies { | 	for _, v := range j.cookies { | ||||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
| 	} | 	} | ||||||
| 	j.err.Output(g) |  | ||||||
|  | 	exerr.Get(j.err).Output(context.Background(), g) | ||||||
|  |  | ||||||
| 	j.err.CallListener(exerr.MethodOutput) | 	j.err.CallListener(exerr.MethodOutput) | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										49
									
								
								go.mod
									
									
									
									
									
								
							
							
						
						
									
										49
									
								
								go.mod
									
									
									
									
									
								
							| @@ -1,34 +1,43 @@ | |||||||
| module gogs.mikescher.com/BlackForestBytes/goext | module gogs.mikescher.com/BlackForestBytes/goext | ||||||
|  |  | ||||||
| go 1.21 | go 1.22 | ||||||
|  |  | ||||||
| require ( | require ( | ||||||
| 	github.com/gin-gonic/gin v1.9.1 | 	github.com/gin-gonic/gin v1.10.0 | ||||||
| 	github.com/glebarez/go-sqlite v1.22.0 // only needed for tests -.- | 	github.com/glebarez/go-sqlite v1.22.0 // only needed for tests -.- | ||||||
| 	github.com/jmoiron/sqlx v1.3.5 | 	github.com/jmoiron/sqlx v1.4.0 | ||||||
| 	github.com/rs/xid v1.5.0 | 	github.com/rs/xid v1.5.0 | ||||||
| 	github.com/rs/zerolog v1.32.0 | 	github.com/rs/zerolog v1.33.0 | ||||||
| 	go.mongodb.org/mongo-driver v1.14.0 | 	go.mongodb.org/mongo-driver v1.15.0 | ||||||
| 	golang.org/x/crypto v0.21.0 | 	golang.org/x/crypto v0.23.0 | ||||||
| 	golang.org/x/sys v0.18.0 | 	golang.org/x/sys v0.20.0 | ||||||
| 	golang.org/x/term v0.18.0 | 	golang.org/x/term v0.20.0 | ||||||
| ) | ) | ||||||
|  |  | ||||||
| require ( | require ( | ||||||
| 	github.com/bytedance/sonic v1.11.2 // indirect | 	github.com/disintegration/imaging v1.6.2 | ||||||
|  | 	github.com/jung-kurt/gofpdf v1.16.2 | ||||||
|  | 	golang.org/x/sync v0.7.0 | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | require ( | ||||||
|  | 	github.com/bytedance/sonic v1.11.8 // indirect | ||||||
|  | 	github.com/bytedance/sonic/loader v0.1.1 // indirect | ||||||
| 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect | 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect | ||||||
| 	github.com/chenzhuoyu/iasm v0.9.1 // indirect | 	github.com/chenzhuoyu/iasm v0.9.1 // indirect | ||||||
|  | 	github.com/cloudwego/base64x v0.1.4 // indirect | ||||||
|  | 	github.com/cloudwego/iasm v0.2.0 // indirect | ||||||
| 	github.com/dustin/go-humanize v1.0.1 // indirect | 	github.com/dustin/go-humanize v1.0.1 // indirect | ||||||
| 	github.com/gabriel-vasile/mimetype v1.4.3 // indirect | 	github.com/gabriel-vasile/mimetype v1.4.4 // indirect | ||||||
| 	github.com/gin-contrib/sse v0.1.0 // indirect | 	github.com/gin-contrib/sse v0.1.0 // indirect | ||||||
| 	github.com/go-playground/locales v0.14.1 // indirect | 	github.com/go-playground/locales v0.14.1 // indirect | ||||||
| 	github.com/go-playground/universal-translator v0.18.1 // indirect | 	github.com/go-playground/universal-translator v0.18.1 // indirect | ||||||
| 	github.com/go-playground/validator/v10 v10.19.0 // indirect | 	github.com/go-playground/validator/v10 v10.21.0 // indirect | ||||||
| 	github.com/goccy/go-json v0.10.2 // indirect | 	github.com/goccy/go-json v0.10.3 // indirect | ||||||
| 	github.com/golang/snappy v0.0.4 // indirect | 	github.com/golang/snappy v0.0.4 // indirect | ||||||
| 	github.com/google/uuid v1.5.0 // indirect | 	github.com/google/uuid v1.5.0 // indirect | ||||||
| 	github.com/json-iterator/go v1.1.12 // indirect | 	github.com/json-iterator/go v1.1.12 // indirect | ||||||
| 	github.com/klauspost/compress v1.17.7 // indirect | 	github.com/klauspost/compress v1.17.8 // indirect | ||||||
| 	github.com/klauspost/cpuid/v2 v2.2.7 // indirect | 	github.com/klauspost/cpuid/v2 v2.2.7 // indirect | ||||||
| 	github.com/leodido/go-urn v1.4.0 // indirect | 	github.com/leodido/go-urn v1.4.0 // indirect | ||||||
| 	github.com/mattn/go-colorable v0.1.13 // indirect | 	github.com/mattn/go-colorable v0.1.13 // indirect | ||||||
| @@ -36,19 +45,19 @@ require ( | |||||||
| 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect | 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect | ||||||
| 	github.com/modern-go/reflect2 v1.0.2 // indirect | 	github.com/modern-go/reflect2 v1.0.2 // indirect | ||||||
| 	github.com/montanaflynn/stats v0.7.1 // indirect | 	github.com/montanaflynn/stats v0.7.1 // indirect | ||||||
| 	github.com/pelletier/go-toml/v2 v2.1.1 // indirect | 	github.com/pelletier/go-toml/v2 v2.2.2 // indirect | ||||||
| 	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect | 	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect | ||||||
| 	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect | 	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect | ||||||
| 	github.com/ugorji/go/codec v1.2.12 // indirect | 	github.com/ugorji/go/codec v1.2.12 // indirect | ||||||
| 	github.com/xdg-go/pbkdf2 v1.0.0 // indirect | 	github.com/xdg-go/pbkdf2 v1.0.0 // indirect | ||||||
| 	github.com/xdg-go/scram v1.1.2 // indirect | 	github.com/xdg-go/scram v1.1.2 // indirect | ||||||
| 	github.com/xdg-go/stringprep v1.0.4 // indirect | 	github.com/xdg-go/stringprep v1.0.4 // indirect | ||||||
| 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect | 	github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76 // indirect | ||||||
| 	golang.org/x/arch v0.7.0 // indirect | 	golang.org/x/arch v0.8.0 // indirect | ||||||
| 	golang.org/x/net v0.22.0 // indirect | 	golang.org/x/image v0.16.0 // indirect | ||||||
| 	golang.org/x/sync v0.6.0 // indirect | 	golang.org/x/net v0.25.0 // indirect | ||||||
| 	golang.org/x/text v0.14.0 // indirect | 	golang.org/x/text v0.15.0 // indirect | ||||||
| 	google.golang.org/protobuf v1.33.0 // indirect | 	google.golang.org/protobuf v1.34.1 // indirect | ||||||
| 	gopkg.in/yaml.v3 v3.0.1 // indirect | 	gopkg.in/yaml.v3 v3.0.1 // indirect | ||||||
| 	modernc.org/libc v1.37.6 // indirect | 	modernc.org/libc v1.37.6 // indirect | ||||||
| 	modernc.org/mathutil v1.6.0 // indirect | 	modernc.org/mathutil v1.6.0 // indirect | ||||||
|   | |||||||
							
								
								
									
										101
									
								
								go.sum
									
									
									
									
									
								
							
							
						
						
									
										101
									
								
								go.sum
									
									
									
									
									
								
							| @@ -1,3 +1,5 @@ | |||||||
|  | filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= | ||||||
|  | github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= | ||||||
| github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= | github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= | ||||||
| github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= | github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= | ||||||
| github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE= | github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE= | ||||||
| @@ -8,6 +10,21 @@ github.com/bytedance/sonic v1.11.1 h1:JC0+6c9FoWYYxakaoa+c5QTtJeiSZNeByOBhXtAFSn | |||||||
| github.com/bytedance/sonic v1.11.1/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | github.com/bytedance/sonic v1.11.1/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||||
| github.com/bytedance/sonic v1.11.2 h1:ywfwo0a/3j9HR8wsYGWsIWl2mvRsI950HyoxiBERw5A= | github.com/bytedance/sonic v1.11.2 h1:ywfwo0a/3j9HR8wsYGWsIWl2mvRsI950HyoxiBERw5A= | ||||||
| github.com/bytedance/sonic v1.11.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | github.com/bytedance/sonic v1.11.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||||
|  | github.com/bytedance/sonic v1.11.3 h1:jRN+yEjakWh8aK5FzrciUHG8OFXK+4/KrAX/ysEtHAA= | ||||||
|  | github.com/bytedance/sonic v1.11.3/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||||
|  | github.com/bytedance/sonic v1.11.4 h1:8+OMLSSDDm2/qJc6ld5K5Sm62NK9VHcUKk0NzBoMAM4= | ||||||
|  | github.com/bytedance/sonic v1.11.4/go.mod h1:YrWEqYtlBPS6LUA0vpuG79a1trsh4Ae41uWUWUreHhE= | ||||||
|  | github.com/bytedance/sonic v1.11.5 h1:G00FYjjqll5iQ1PYXynbg/hyzqBqavH8Mo9/oTopd9k= | ||||||
|  | github.com/bytedance/sonic v1.11.5/go.mod h1:X2PC2giUdj/Cv2lliWFLk6c/DUQok5rViJSemeB0wDw= | ||||||
|  | github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0= | ||||||
|  | github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= | ||||||
|  | github.com/bytedance/sonic v1.11.7 h1:k/l9p1hZpNIMJSk37wL9ltkcpqLfIho1vYthi4xT2t4= | ||||||
|  | github.com/bytedance/sonic v1.11.7/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= | ||||||
|  | github.com/bytedance/sonic v1.11.8 h1:Zw/j1KfiS+OYTi9lyB3bb0CFxPJVkM17k1wyDG32LRA= | ||||||
|  | github.com/bytedance/sonic v1.11.8/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= | ||||||
|  | github.com/bytedance/sonic/loader v0.1.0/go.mod h1:UmRT+IRTGKz/DAkzcEGzyVqQFJ7H9BqwBO3pm9H/+HY= | ||||||
|  | github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM= | ||||||
|  | github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= | ||||||
| github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= | github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= | ||||||
| github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= | github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= | ||||||
| github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= | github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= | ||||||
| @@ -15,18 +32,35 @@ github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpV | |||||||
| github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | ||||||
| github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0= | github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0= | ||||||
| github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | ||||||
|  | github.com/cloudwego/base64x v0.1.0 h1:Tg5q9tq1khq9Y9UwfoC6zkHK0FypN2GLDvhqFceOL8U= | ||||||
|  | github.com/cloudwego/base64x v0.1.0/go.mod h1:lM8nFiNbg74QgesNo6EAtv8N9tlRjBWExmHoNDa3PkU= | ||||||
|  | github.com/cloudwego/base64x v0.1.3 h1:b5J/l8xolB7dyDTTmhJP2oTs5LdrjyrUFuNxdfq5hAg= | ||||||
|  | github.com/cloudwego/base64x v0.1.3/go.mod h1:1+1K5BUHIQzyapgpF7LwvOGAEDicKtt1umPV+aN8pi8= | ||||||
|  | github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y= | ||||||
|  | github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= | ||||||
|  | github.com/cloudwego/iasm v0.0.9/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= | ||||||
|  | github.com/cloudwego/iasm v0.1.0 h1:q0OuhwWDMyi3nlrQ6kIr0Yx0c3FI6cq/OZWKodIDdz8= | ||||||
|  | github.com/cloudwego/iasm v0.1.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= | ||||||
|  | github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg= | ||||||
|  | github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= | ||||||
| github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= | github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= | ||||||
| github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | ||||||
| github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= | github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= | ||||||
| github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | ||||||
|  | github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c= | ||||||
|  | github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4= | ||||||
| github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= | github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= | ||||||
| github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= | github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= | ||||||
| github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= | github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= | ||||||
| github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= | github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= | ||||||
|  | github.com/gabriel-vasile/mimetype v1.4.4 h1:QjV6pZ7/XZ7ryI2KuyeEDE8wnh7fHP9YnQy+R0LnH8I= | ||||||
|  | github.com/gabriel-vasile/mimetype v1.4.4/go.mod h1:JwLei5XPtWdGiMFB5Pjle1oEeoSeEuJfJE+TtfvdB/s= | ||||||
| github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= | github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= | ||||||
| github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= | github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= | ||||||
| github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= | github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= | ||||||
| github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= | github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= | ||||||
|  | github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU= | ||||||
|  | github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= | ||||||
| github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ= | github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ= | ||||||
| github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc= | github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc= | ||||||
| github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= | github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= | ||||||
| @@ -43,10 +77,18 @@ github.com/go-playground/validator/v10 v10.18.0 h1:BvolUXjp4zuvkZ5YN5t7ebzbhlUtP | |||||||
| github.com/go-playground/validator/v10 v10.18.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | github.com/go-playground/validator/v10 v10.18.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||||
| github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4= | github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4= | ||||||
| github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||||
|  | github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8= | ||||||
|  | github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||||
|  | github.com/go-playground/validator/v10 v10.21.0 h1:4fZA11ovvtkdgaeev9RGWPgc1uj3H8W+rNYyH/ySBb0= | ||||||
|  | github.com/go-playground/validator/v10 v10.21.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||||
| github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | ||||||
| github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | ||||||
|  | github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= | ||||||
|  | github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= | ||||||
| github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | ||||||
| github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= | github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= | ||||||
|  | github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= | ||||||
|  | github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= | ||||||
| github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= | github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= | ||||||
| github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= | github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= | ||||||
| github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | ||||||
| @@ -62,8 +104,13 @@ github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= | |||||||
| github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= | github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= | ||||||
| github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= | github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= | ||||||
| github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= | github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= | ||||||
|  | github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= | ||||||
|  | github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= | ||||||
| github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= | github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= | ||||||
| github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= | github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= | ||||||
|  | github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= | ||||||
|  | github.com/jung-kurt/gofpdf v1.16.2 h1:jgbatWHfRlPYiK85qgevsZTHviWXKwB1TTiKdz5PtRc= | ||||||
|  | github.com/jung-kurt/gofpdf v1.16.2/go.mod h1:1hl7y57EsiPAkLbOwzpzqgx1A30nQCk/YmFV8S2vmK0= | ||||||
| github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= | github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= | ||||||
| github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= | github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= | ||||||
| github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | ||||||
| @@ -71,6 +118,8 @@ github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2e | |||||||
| github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | ||||||
| github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= | github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= | ||||||
| github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= | github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= | ||||||
|  | github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU= | ||||||
|  | github.com/klauspost/compress v1.17.8/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= | ||||||
| github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= | github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||||
| @@ -83,6 +132,7 @@ github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= | |||||||
| github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= | github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= | ||||||
| github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0= | github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0= | ||||||
| github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= | github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= | ||||||
|  | github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= | ||||||
| github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= | github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= | ||||||
| github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= | github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= | ||||||
| github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= | github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= | ||||||
| @@ -92,6 +142,7 @@ github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D | |||||||
| github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= | github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= | ||||||
| github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI= | github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI= | ||||||
| github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= | github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= | ||||||
|  | github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= | ||||||
| github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | ||||||
| github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= | github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= | ||||||
| github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | ||||||
| @@ -102,6 +153,14 @@ github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8 | |||||||
| github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= | github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= | ||||||
| github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI= | github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI= | ||||||
| github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= | github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= | ||||||
|  | github.com/pelletier/go-toml/v2 v2.2.0 h1:QLgLl2yMN7N+ruc31VynXs1vhMZa7CeHHejIeBAsoHo= | ||||||
|  | github.com/pelletier/go-toml/v2 v2.2.0/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= | ||||||
|  | github.com/pelletier/go-toml/v2 v2.2.1 h1:9TA9+T8+8CUCO2+WYnDLCgrYi9+omqKXyjDtosvtEhg= | ||||||
|  | github.com/pelletier/go-toml/v2 v2.2.1/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= | ||||||
|  | github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM= | ||||||
|  | github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= | ||||||
|  | github.com/phpdave11/gofpdi v1.0.7/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= | ||||||
|  | github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= | ||||||
| github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= | github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= | ||||||
| github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= | ||||||
| github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= | ||||||
| @@ -113,9 +172,14 @@ github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A= | |||||||
| github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= | github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= | ||||||
| github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0= | github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0= | ||||||
| github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= | github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= | ||||||
|  | github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8= | ||||||
|  | github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= | ||||||
|  | github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= | ||||||
| github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= | github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= | ||||||
| github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= | github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= | ||||||
| github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= | github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= | ||||||
|  | github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= | ||||||
|  | github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= | ||||||
| github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= | github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= | ||||||
| github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= | github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= | ||||||
| github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= | github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= | ||||||
| @@ -124,6 +188,8 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o | |||||||
| github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= | github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= | ||||||
| github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= | github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= | ||||||
| github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= | github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= | ||||||
|  | github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= | ||||||
|  | github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= | ||||||
| github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= | github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= | ||||||
| github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= | github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= | ||||||
| github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= | github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= | ||||||
| @@ -137,14 +203,20 @@ github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gi | |||||||
| github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= | github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= | ||||||
| github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk= | github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk= | ||||||
| github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4= | github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4= | ||||||
|  | github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76 h1:tBiBTKHnIjovYoLX/TPkcf+OjqqKGQrPtGT3Foz+Pgo= | ||||||
|  | github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76/go.mod h1:SQliXeA7Dhkt//vS29v3zpbEwoa+zb2Cn5xj5uO4K5U= | ||||||
| github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | ||||||
| go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk= | go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk= | ||||||
| go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo= | go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo= | ||||||
| go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= | go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= | ||||||
| go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= | go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= | ||||||
|  | go.mongodb.org/mongo-driver v1.15.0 h1:rJCKC8eEliewXjZGf0ddURtl7tTVy1TK3bfl0gkUSLc= | ||||||
|  | go.mongodb.org/mongo-driver v1.15.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= | ||||||
| golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | ||||||
| golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc= | golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc= | ||||||
| golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||||
|  | golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc= | ||||||
|  | golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||||
| golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | ||||||
| golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | ||||||
| golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= | golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= | ||||||
| @@ -159,6 +231,15 @@ golang.org/x/crypto v0.20.0 h1:jmAMJJZXr5KiCw05dfYK9QnqaqKLYXijU23lsEdcQqg= | |||||||
| golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ= | golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ= | ||||||
| golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= | golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= | ||||||
| golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= | golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= | ||||||
|  | golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= | ||||||
|  | golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= | ||||||
|  | golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI= | ||||||
|  | golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= | ||||||
|  | golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= | ||||||
|  | golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3NCfkPxbDKRdnNE1Rpg0U= | ||||||
|  | golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= | ||||||
|  | golang.org/x/image v0.16.0 h1:9kloLAKhUufZhA12l5fwnx2NZW39/we1UhBesW433jw= | ||||||
|  | golang.org/x/image v0.16.0/go.mod h1:ugSZItdV4nOxyqp56HmXwH0Ry0nBCpjnZdpDaIHdoPs= | ||||||
| golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | ||||||
| golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | ||||||
| golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | ||||||
| @@ -173,10 +254,16 @@ golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= | |||||||
| golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= | golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= | ||||||
| golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc= | golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc= | ||||||
| golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= | golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= | ||||||
|  | golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w= | ||||||
|  | golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8= | ||||||
|  | golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= | ||||||
|  | golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= | ||||||
| golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||||
| golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||||
| golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= | golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= | ||||||
| golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | ||||||
|  | golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= | ||||||
|  | golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | ||||||
| golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | ||||||
| golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||||
| golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||||
| @@ -194,6 +281,10 @@ golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= | |||||||
| golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
| golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= | golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= | ||||||
| golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
|  | golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= | ||||||
|  | golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
|  | golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y= | ||||||
|  | golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
| golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | ||||||
| golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | ||||||
| golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE= | golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE= | ||||||
| @@ -202,6 +293,10 @@ golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= | |||||||
| golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= | golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= | ||||||
| golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8= | golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8= | ||||||
| golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= | golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= | ||||||
|  | golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q= | ||||||
|  | golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= | ||||||
|  | golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw= | ||||||
|  | golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= | ||||||
| golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | ||||||
| golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||||
| golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||||
| @@ -210,6 +305,8 @@ golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= | |||||||
| golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= | golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= | ||||||
| golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= | golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= | ||||||
| golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= | golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= | ||||||
|  | golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk= | ||||||
|  | golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= | ||||||
| golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= | golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= | ||||||
| golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | ||||||
| golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= | golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= | ||||||
| @@ -221,6 +318,10 @@ google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7 | |||||||
| google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||||
| google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= | google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= | ||||||
| google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||||
|  | google.golang.org/protobuf v1.34.0 h1:Qo/qEd2RZPCf2nKuorzksSknv0d3ERwp1vFG38gSmH4= | ||||||
|  | google.golang.org/protobuf v1.34.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||||
|  | google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg= | ||||||
|  | google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||||
| gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= | ||||||
| gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | ||||||
| gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| package goext | package goext | ||||||
|  |  | ||||||
| const GoextVersion = "0.0.400" | const GoextVersion = "0.0.465" | ||||||
|  |  | ||||||
| const GoextVersionTimestamp = "2024-03-09T14:59:32+0100" | const GoextVersionTimestamp = "2024-06-03T09:39:57+0200" | ||||||
|   | |||||||
| @@ -217,6 +217,7 @@ type decodeState struct { | |||||||
| 	savedError            error | 	savedError            error | ||||||
| 	useNumber             bool | 	useNumber             bool | ||||||
| 	disallowUnknownFields bool | 	disallowUnknownFields bool | ||||||
|  | 	tagkey                *string | ||||||
| } | } | ||||||
|  |  | ||||||
| // readIndex returns the position of the last byte read. | // readIndex returns the position of the last byte read. | ||||||
| @@ -652,7 +653,11 @@ func (d *decodeState) object(v reflect.Value) error { | |||||||
| 			v.Set(reflect.MakeMap(t)) | 			v.Set(reflect.MakeMap(t)) | ||||||
| 		} | 		} | ||||||
| 	case reflect.Struct: | 	case reflect.Struct: | ||||||
| 		fields = cachedTypeFields(t) | 		tagkey := "json" | ||||||
|  | 		if d.tagkey != nil { | ||||||
|  | 			tagkey = *d.tagkey | ||||||
|  | 		} | ||||||
|  | 		fields = cachedTypeFields(t, tagkey) | ||||||
| 		// ok | 		// ok | ||||||
| 	default: | 	default: | ||||||
| 		d.saveError(&UnmarshalTypeError{Value: "object", Type: t, Offset: int64(d.off)}) | 		d.saveError(&UnmarshalTypeError{Value: "object", Type: t, Offset: int64(d.off)}) | ||||||
|   | |||||||
| @@ -382,7 +382,12 @@ func isEmptyValue(v reflect.Value) bool { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (e *encodeState) reflectValue(v reflect.Value, opts encOpts) { | func (e *encodeState) reflectValue(v reflect.Value, opts encOpts) { | ||||||
| 	valueEncoder(v)(e, v, opts) | 	tagkey := "json" | ||||||
|  | 	if opts.tagkey != nil { | ||||||
|  | 		tagkey = *opts.tagkey | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	valueEncoder(v, tagkey)(e, v, opts) | ||||||
| } | } | ||||||
|  |  | ||||||
| type encOpts struct { | type encOpts struct { | ||||||
| @@ -397,20 +402,22 @@ type encOpts struct { | |||||||
| 	// filter matches jsonfilter tag of struct | 	// filter matches jsonfilter tag of struct | ||||||
| 	// marshals if no jsonfilter is set or otherwise if jsonfilter has the filter value | 	// marshals if no jsonfilter is set or otherwise if jsonfilter has the filter value | ||||||
| 	filter *string | 	filter *string | ||||||
|  | 	// use different tag instead of "json" | ||||||
|  | 	tagkey *string | ||||||
| } | } | ||||||
|  |  | ||||||
| type encoderFunc func(e *encodeState, v reflect.Value, opts encOpts) | type encoderFunc func(e *encodeState, v reflect.Value, opts encOpts) | ||||||
|  |  | ||||||
| var encoderCache sync.Map // map[reflect.Type]encoderFunc | var encoderCache sync.Map // map[reflect.Type]encoderFunc | ||||||
|  |  | ||||||
| func valueEncoder(v reflect.Value) encoderFunc { | func valueEncoder(v reflect.Value, tagkey string) encoderFunc { | ||||||
| 	if !v.IsValid() { | 	if !v.IsValid() { | ||||||
| 		return invalidValueEncoder | 		return invalidValueEncoder | ||||||
| 	} | 	} | ||||||
| 	return typeEncoder(v.Type()) | 	return typeEncoder(v.Type(), tagkey) | ||||||
| } | } | ||||||
|  |  | ||||||
| func typeEncoder(t reflect.Type) encoderFunc { | func typeEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||||
| 	if fi, ok := encoderCache.Load(t); ok { | 	if fi, ok := encoderCache.Load(t); ok { | ||||||
| 		return fi.(encoderFunc) | 		return fi.(encoderFunc) | ||||||
| 	} | 	} | ||||||
| @@ -433,7 +440,7 @@ func typeEncoder(t reflect.Type) encoderFunc { | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	// Compute the real encoder and replace the indirect func with it. | 	// Compute the real encoder and replace the indirect func with it. | ||||||
| 	f = newTypeEncoder(t, true) | 	f = newTypeEncoder(t, true, tagkey) | ||||||
| 	wg.Done() | 	wg.Done() | ||||||
| 	encoderCache.Store(t, f) | 	encoderCache.Store(t, f) | ||||||
| 	return f | 	return f | ||||||
| @@ -446,19 +453,19 @@ var ( | |||||||
|  |  | ||||||
| // newTypeEncoder constructs an encoderFunc for a type. | // newTypeEncoder constructs an encoderFunc for a type. | ||||||
| // The returned encoder only checks CanAddr when allowAddr is true. | // The returned encoder only checks CanAddr when allowAddr is true. | ||||||
| func newTypeEncoder(t reflect.Type, allowAddr bool) encoderFunc { | func newTypeEncoder(t reflect.Type, allowAddr bool, tagkey string) encoderFunc { | ||||||
| 	// If we have a non-pointer value whose type implements | 	// If we have a non-pointer value whose type implements | ||||||
| 	// Marshaler with a value receiver, then we're better off taking | 	// Marshaler with a value receiver, then we're better off taking | ||||||
| 	// the address of the value - otherwise we end up with an | 	// the address of the value - otherwise we end up with an | ||||||
| 	// allocation as we cast the value to an interface. | 	// allocation as we cast the value to an interface. | ||||||
| 	if t.Kind() != reflect.Pointer && allowAddr && reflect.PointerTo(t).Implements(marshalerType) { | 	if t.Kind() != reflect.Pointer && allowAddr && reflect.PointerTo(t).Implements(marshalerType) { | ||||||
| 		return newCondAddrEncoder(addrMarshalerEncoder, newTypeEncoder(t, false)) | 		return newCondAddrEncoder(addrMarshalerEncoder, newTypeEncoder(t, false, tagkey)) | ||||||
| 	} | 	} | ||||||
| 	if t.Implements(marshalerType) { | 	if t.Implements(marshalerType) { | ||||||
| 		return marshalerEncoder | 		return marshalerEncoder | ||||||
| 	} | 	} | ||||||
| 	if t.Kind() != reflect.Pointer && allowAddr && reflect.PointerTo(t).Implements(textMarshalerType) { | 	if t.Kind() != reflect.Pointer && allowAddr && reflect.PointerTo(t).Implements(textMarshalerType) { | ||||||
| 		return newCondAddrEncoder(addrTextMarshalerEncoder, newTypeEncoder(t, false)) | 		return newCondAddrEncoder(addrTextMarshalerEncoder, newTypeEncoder(t, false, tagkey)) | ||||||
| 	} | 	} | ||||||
| 	if t.Implements(textMarshalerType) { | 	if t.Implements(textMarshalerType) { | ||||||
| 		return textMarshalerEncoder | 		return textMarshalerEncoder | ||||||
| @@ -480,15 +487,15 @@ func newTypeEncoder(t reflect.Type, allowAddr bool) encoderFunc { | |||||||
| 	case reflect.Interface: | 	case reflect.Interface: | ||||||
| 		return interfaceEncoder | 		return interfaceEncoder | ||||||
| 	case reflect.Struct: | 	case reflect.Struct: | ||||||
| 		return newStructEncoder(t) | 		return newStructEncoder(t, tagkey) | ||||||
| 	case reflect.Map: | 	case reflect.Map: | ||||||
| 		return newMapEncoder(t) | 		return newMapEncoder(t, tagkey) | ||||||
| 	case reflect.Slice: | 	case reflect.Slice: | ||||||
| 		return newSliceEncoder(t) | 		return newSliceEncoder(t, tagkey) | ||||||
| 	case reflect.Array: | 	case reflect.Array: | ||||||
| 		return newArrayEncoder(t) | 		return newArrayEncoder(t, tagkey) | ||||||
| 	case reflect.Pointer: | 	case reflect.Pointer: | ||||||
| 		return newPtrEncoder(t) | 		return newPtrEncoder(t, tagkey) | ||||||
| 	default: | 	default: | ||||||
| 		return unsupportedTypeEncoder | 		return unsupportedTypeEncoder | ||||||
| 	} | 	} | ||||||
| @@ -801,8 +808,8 @@ FieldLoop: | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
| func newStructEncoder(t reflect.Type) encoderFunc { | func newStructEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||||
| 	se := structEncoder{fields: cachedTypeFields(t)} | 	se := structEncoder{fields: cachedTypeFields(t, tagkey)} | ||||||
| 	return se.encode | 	return se.encode | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -855,7 +862,7 @@ func (me mapEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | |||||||
| 	e.ptrLevel-- | 	e.ptrLevel-- | ||||||
| } | } | ||||||
|  |  | ||||||
| func newMapEncoder(t reflect.Type) encoderFunc { | func newMapEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||||
| 	switch t.Key().Kind() { | 	switch t.Key().Kind() { | ||||||
| 	case reflect.String, | 	case reflect.String, | ||||||
| 		reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, | 		reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, | ||||||
| @@ -865,7 +872,7 @@ func newMapEncoder(t reflect.Type) encoderFunc { | |||||||
| 			return unsupportedTypeEncoder | 			return unsupportedTypeEncoder | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| 	me := mapEncoder{typeEncoder(t.Elem())} | 	me := mapEncoder{typeEncoder(t.Elem(), tagkey)} | ||||||
| 	return me.encode | 	return me.encode | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -936,7 +943,7 @@ func (se sliceEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | |||||||
| 	e.ptrLevel-- | 	e.ptrLevel-- | ||||||
| } | } | ||||||
|  |  | ||||||
| func newSliceEncoder(t reflect.Type) encoderFunc { | func newSliceEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||||
| 	// Byte slices get special treatment; arrays don't. | 	// Byte slices get special treatment; arrays don't. | ||||||
| 	if t.Elem().Kind() == reflect.Uint8 { | 	if t.Elem().Kind() == reflect.Uint8 { | ||||||
| 		p := reflect.PointerTo(t.Elem()) | 		p := reflect.PointerTo(t.Elem()) | ||||||
| @@ -944,7 +951,7 @@ func newSliceEncoder(t reflect.Type) encoderFunc { | |||||||
| 			return encodeByteSlice | 			return encodeByteSlice | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| 	enc := sliceEncoder{newArrayEncoder(t)} | 	enc := sliceEncoder{newArrayEncoder(t, tagkey)} | ||||||
| 	return enc.encode | 	return enc.encode | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -964,8 +971,8 @@ func (ae arrayEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | |||||||
| 	e.WriteByte(']') | 	e.WriteByte(']') | ||||||
| } | } | ||||||
|  |  | ||||||
| func newArrayEncoder(t reflect.Type) encoderFunc { | func newArrayEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||||
| 	enc := arrayEncoder{typeEncoder(t.Elem())} | 	enc := arrayEncoder{typeEncoder(t.Elem(), tagkey)} | ||||||
| 	return enc.encode | 	return enc.encode | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -992,8 +999,8 @@ func (pe ptrEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | |||||||
| 	e.ptrLevel-- | 	e.ptrLevel-- | ||||||
| } | } | ||||||
|  |  | ||||||
| func newPtrEncoder(t reflect.Type) encoderFunc { | func newPtrEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||||
| 	enc := ptrEncoder{typeEncoder(t.Elem())} | 	enc := ptrEncoder{typeEncoder(t.Elem(), tagkey)} | ||||||
| 	return enc.encode | 	return enc.encode | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -1270,7 +1277,7 @@ func (x byIndex) Less(i, j int) bool { | |||||||
| // typeFields returns a list of fields that JSON should recognize for the given type. | // typeFields returns a list of fields that JSON should recognize for the given type. | ||||||
| // The algorithm is breadth-first search over the set of structs to include - the top struct | // The algorithm is breadth-first search over the set of structs to include - the top struct | ||||||
| // and then any reachable anonymous structs. | // and then any reachable anonymous structs. | ||||||
| func typeFields(t reflect.Type) structFields { | func typeFields(t reflect.Type, tagkey string) structFields { | ||||||
| 	// Anonymous fields to explore at the current level and the next. | 	// Anonymous fields to explore at the current level and the next. | ||||||
| 	current := []field{} | 	current := []field{} | ||||||
| 	next := []field{{typ: t}} | 	next := []field{{typ: t}} | ||||||
| @@ -1315,7 +1322,7 @@ func typeFields(t reflect.Type) structFields { | |||||||
| 					// Ignore unexported non-embedded fields. | 					// Ignore unexported non-embedded fields. | ||||||
| 					continue | 					continue | ||||||
| 				} | 				} | ||||||
| 				tag := sf.Tag.Get("json") | 				tag := sf.Tag.Get(tagkey) | ||||||
| 				if tag == "-" { | 				if tag == "-" { | ||||||
| 					continue | 					continue | ||||||
| 				} | 				} | ||||||
| @@ -1449,7 +1456,7 @@ func typeFields(t reflect.Type) structFields { | |||||||
|  |  | ||||||
| 	for i := range fields { | 	for i := range fields { | ||||||
| 		f := &fields[i] | 		f := &fields[i] | ||||||
| 		f.encoder = typeEncoder(typeByIndex(t, f.index)) | 		f.encoder = typeEncoder(typeByIndex(t, f.index), tagkey) | ||||||
| 	} | 	} | ||||||
| 	nameIndex := make(map[string]int, len(fields)) | 	nameIndex := make(map[string]int, len(fields)) | ||||||
| 	for i, field := range fields { | 	for i, field := range fields { | ||||||
| @@ -1474,13 +1481,26 @@ func dominantField(fields []field) (field, bool) { | |||||||
| 	return fields[0], true | 	return fields[0], true | ||||||
| } | } | ||||||
|  |  | ||||||
| var fieldCache sync.Map // map[reflect.Type]structFields | var fieldCache sync.Map // map[string]map[reflect.Type]structFields | ||||||
|  |  | ||||||
| // cachedTypeFields is like typeFields but uses a cache to avoid repeated work. | // cachedTypeFields is like typeFields but uses a cache to avoid repeated work. | ||||||
| func cachedTypeFields(t reflect.Type) structFields { | func cachedTypeFields(t reflect.Type, tagkey string) structFields { | ||||||
| 	if f, ok := fieldCache.Load(t); ok { | 	if m0, ok := fieldCache.Load(tagkey); ok { | ||||||
|  |  | ||||||
|  | 		if f, ok := m0.(*sync.Map).Load(t); ok { | ||||||
| 			return f.(structFields) | 			return f.(structFields) | ||||||
| 		} | 		} | ||||||
| 	f, _ := fieldCache.LoadOrStore(t, typeFields(t)) | 		f, _ := m0.(*sync.Map).LoadOrStore(t, typeFields(t, tagkey)) | ||||||
|  | 		return f.(structFields) | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		m0 := &sync.Map{} | ||||||
|  | 		f, _ := m0.LoadOrStore(t, typeFields(t, tagkey)) | ||||||
|  |  | ||||||
|  | 		fieldCache.Store(tagkey, m0) | ||||||
|  |  | ||||||
| 		return f.(structFields) | 		return f.(structFields) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | } | ||||||
|   | |||||||
| @@ -41,6 +41,9 @@ func (dec *Decoder) UseNumber() { dec.d.useNumber = true } | |||||||
| // non-ignored, exported fields in the destination. | // non-ignored, exported fields in the destination. | ||||||
| func (dec *Decoder) DisallowUnknownFields() { dec.d.disallowUnknownFields = true } | func (dec *Decoder) DisallowUnknownFields() { dec.d.disallowUnknownFields = true } | ||||||
|  |  | ||||||
|  | // TagKey sets a different TagKey (instead of "json") | ||||||
|  | func (dec *Decoder) TagKey(v string) { dec.d.tagkey = &v } | ||||||
|  |  | ||||||
| // Decode reads the next JSON-encoded value from its | // Decode reads the next JSON-encoded value from its | ||||||
| // input and stores it in the value pointed to by v. | // input and stores it in the value pointed to by v. | ||||||
| // | // | ||||||
|   | |||||||
							
								
								
									
										3
									
								
								imageext/enums.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								imageext/enums.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | |||||||
|  | package imageext | ||||||
|  |  | ||||||
|  | //go:generate go run ../_gen/enum-generate.go -- enums_gen.go | ||||||
							
								
								
									
										216
									
								
								imageext/enums_gen.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										216
									
								
								imageext/enums_gen.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,216 @@ | |||||||
|  | // Code generated by enum-generate.go DO NOT EDIT. | ||||||
|  |  | ||||||
|  | package imageext | ||||||
|  |  | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/enums" | ||||||
|  |  | ||||||
|  | const ChecksumEnumGenerator = "1da5383c33ee442fd0b899369053f66bdc85bed2dbf906949d3edfeedfe13340" // GoExtVersion: 0.0.449 | ||||||
|  |  | ||||||
|  | // ================================ ImageFit ================================ | ||||||
|  | // | ||||||
|  | // File:       image.go | ||||||
|  | // StringEnum: true | ||||||
|  | // DescrEnum:  false | ||||||
|  | // DataEnum:   false | ||||||
|  | // | ||||||
|  |  | ||||||
|  | var __ImageFitValues = []ImageFit{ | ||||||
|  | 	ImageFitStretch, | ||||||
|  | 	ImageFitCover, | ||||||
|  | 	ImageFitContainCenter, | ||||||
|  | 	ImageFitContainTopLeft, | ||||||
|  | 	ImageFitContainTopRight, | ||||||
|  | 	ImageFitContainBottomLeft, | ||||||
|  | 	ImageFitContainBottomRight, | ||||||
|  | } | ||||||
|  |  | ||||||
|  | var __ImageFitVarnames = map[ImageFit]string{ | ||||||
|  | 	ImageFitStretch:            "ImageFitStretch", | ||||||
|  | 	ImageFitCover:              "ImageFitCover", | ||||||
|  | 	ImageFitContainCenter:      "ImageFitContainCenter", | ||||||
|  | 	ImageFitContainTopLeft:     "ImageFitContainTopLeft", | ||||||
|  | 	ImageFitContainTopRight:    "ImageFitContainTopRight", | ||||||
|  | 	ImageFitContainBottomLeft:  "ImageFitContainBottomLeft", | ||||||
|  | 	ImageFitContainBottomRight: "ImageFitContainBottomRight", | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageFit) Valid() bool { | ||||||
|  | 	return langext.InArray(e, __ImageFitValues) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageFit) Values() []ImageFit { | ||||||
|  | 	return __ImageFitValues | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageFit) ValuesAny() []any { | ||||||
|  | 	return langext.ArrCastToAny(__ImageFitValues) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageFit) ValuesMeta() []enums.EnumMetaValue { | ||||||
|  | 	return ImageFitValuesMeta() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageFit) String() string { | ||||||
|  | 	return string(e) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageFit) VarName() string { | ||||||
|  | 	if d, ok := __ImageFitVarnames[e]; ok { | ||||||
|  | 		return d | ||||||
|  | 	} | ||||||
|  | 	return "" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageFit) TypeName() string { | ||||||
|  | 	return "ImageFit" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageFit) PackageName() string { | ||||||
|  | 	return "media" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageFit) Meta() enums.EnumMetaValue { | ||||||
|  | 	return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ParseImageFit(vv string) (ImageFit, bool) { | ||||||
|  | 	for _, ev := range __ImageFitValues { | ||||||
|  | 		if string(ev) == vv { | ||||||
|  | 			return ev, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return "", false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ImageFitValues() []ImageFit { | ||||||
|  | 	return __ImageFitValues | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ImageFitValuesMeta() []enums.EnumMetaValue { | ||||||
|  | 	return []enums.EnumMetaValue{ | ||||||
|  | 		ImageFitStretch.Meta(), | ||||||
|  | 		ImageFitCover.Meta(), | ||||||
|  | 		ImageFitContainCenter.Meta(), | ||||||
|  | 		ImageFitContainTopLeft.Meta(), | ||||||
|  | 		ImageFitContainTopRight.Meta(), | ||||||
|  | 		ImageFitContainBottomLeft.Meta(), | ||||||
|  | 		ImageFitContainBottomRight.Meta(), | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // ================================ ImageCompresson ================================ | ||||||
|  | // | ||||||
|  | // File:       image.go | ||||||
|  | // StringEnum: true | ||||||
|  | // DescrEnum:  false | ||||||
|  | // DataEnum:   false | ||||||
|  | // | ||||||
|  |  | ||||||
|  | var __ImageCompressonValues = []ImageCompresson{ | ||||||
|  | 	CompressionPNGNone, | ||||||
|  | 	CompressionPNGSpeed, | ||||||
|  | 	CompressionPNGBest, | ||||||
|  | 	CompressionJPEG100, | ||||||
|  | 	CompressionJPEG90, | ||||||
|  | 	CompressionJPEG80, | ||||||
|  | 	CompressionJPEG70, | ||||||
|  | 	CompressionJPEG60, | ||||||
|  | 	CompressionJPEG50, | ||||||
|  | 	CompressionJPEG25, | ||||||
|  | 	CompressionJPEG10, | ||||||
|  | 	CompressionJPEG1, | ||||||
|  | } | ||||||
|  |  | ||||||
|  | var __ImageCompressonVarnames = map[ImageCompresson]string{ | ||||||
|  | 	CompressionPNGNone:  "CompressionPNGNone", | ||||||
|  | 	CompressionPNGSpeed: "CompressionPNGSpeed", | ||||||
|  | 	CompressionPNGBest:  "CompressionPNGBest", | ||||||
|  | 	CompressionJPEG100:  "CompressionJPEG100", | ||||||
|  | 	CompressionJPEG90:   "CompressionJPEG90", | ||||||
|  | 	CompressionJPEG80:   "CompressionJPEG80", | ||||||
|  | 	CompressionJPEG70:   "CompressionJPEG70", | ||||||
|  | 	CompressionJPEG60:   "CompressionJPEG60", | ||||||
|  | 	CompressionJPEG50:   "CompressionJPEG50", | ||||||
|  | 	CompressionJPEG25:   "CompressionJPEG25", | ||||||
|  | 	CompressionJPEG10:   "CompressionJPEG10", | ||||||
|  | 	CompressionJPEG1:    "CompressionJPEG1", | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageCompresson) Valid() bool { | ||||||
|  | 	return langext.InArray(e, __ImageCompressonValues) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageCompresson) Values() []ImageCompresson { | ||||||
|  | 	return __ImageCompressonValues | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageCompresson) ValuesAny() []any { | ||||||
|  | 	return langext.ArrCastToAny(__ImageCompressonValues) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageCompresson) ValuesMeta() []enums.EnumMetaValue { | ||||||
|  | 	return ImageCompressonValuesMeta() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageCompresson) String() string { | ||||||
|  | 	return string(e) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageCompresson) VarName() string { | ||||||
|  | 	if d, ok := __ImageCompressonVarnames[e]; ok { | ||||||
|  | 		return d | ||||||
|  | 	} | ||||||
|  | 	return "" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageCompresson) TypeName() string { | ||||||
|  | 	return "ImageCompresson" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageCompresson) PackageName() string { | ||||||
|  | 	return "media" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ImageCompresson) Meta() enums.EnumMetaValue { | ||||||
|  | 	return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ParseImageCompresson(vv string) (ImageCompresson, bool) { | ||||||
|  | 	for _, ev := range __ImageCompressonValues { | ||||||
|  | 		if string(ev) == vv { | ||||||
|  | 			return ev, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return "", false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ImageCompressonValues() []ImageCompresson { | ||||||
|  | 	return __ImageCompressonValues | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ImageCompressonValuesMeta() []enums.EnumMetaValue { | ||||||
|  | 	return []enums.EnumMetaValue{ | ||||||
|  | 		CompressionPNGNone.Meta(), | ||||||
|  | 		CompressionPNGSpeed.Meta(), | ||||||
|  | 		CompressionPNGBest.Meta(), | ||||||
|  | 		CompressionJPEG100.Meta(), | ||||||
|  | 		CompressionJPEG90.Meta(), | ||||||
|  | 		CompressionJPEG80.Meta(), | ||||||
|  | 		CompressionJPEG70.Meta(), | ||||||
|  | 		CompressionJPEG60.Meta(), | ||||||
|  | 		CompressionJPEG50.Meta(), | ||||||
|  | 		CompressionJPEG25.Meta(), | ||||||
|  | 		CompressionJPEG10.Meta(), | ||||||
|  | 		CompressionJPEG1.Meta(), | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // ================================ ================= ================================ | ||||||
|  |  | ||||||
|  | func AllPackageEnums() []enums.Enum { | ||||||
|  | 	return []enums.Enum{ | ||||||
|  | 		ImageFitStretch,    // ImageFit | ||||||
|  | 		CompressionPNGNone, // ImageCompresson | ||||||
|  | 	} | ||||||
|  | } | ||||||
							
								
								
									
										321
									
								
								imageext/image.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										321
									
								
								imageext/image.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,321 @@ | |||||||
|  | package imageext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	"fmt" | ||||||
|  | 	"github.com/disintegration/imaging" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/mathext" | ||||||
|  | 	"image" | ||||||
|  | 	"image/color" | ||||||
|  | 	"image/draw" | ||||||
|  | 	"image/jpeg" | ||||||
|  | 	"image/png" | ||||||
|  | 	"io" | ||||||
|  | 	"math" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type ImageFit string //@enum:type | ||||||
|  |  | ||||||
|  | const ( | ||||||
|  | 	ImageFitStretch            ImageFit = "STRETCH" | ||||||
|  | 	ImageFitCover              ImageFit = "COVER" | ||||||
|  | 	ImageFitContainCenter      ImageFit = "CONTAIN_CENTER" | ||||||
|  | 	ImageFitContainTopLeft     ImageFit = "CONTAIN_TOPLEFT" | ||||||
|  | 	ImageFitContainTopRight    ImageFit = "CONTAIN_TOPRIGHT" | ||||||
|  | 	ImageFitContainBottomLeft  ImageFit = "CONTAIN_BOTTOMLEFT" | ||||||
|  | 	ImageFitContainBottomRight ImageFit = "CONTAIN_BOTTOMRIGHT" | ||||||
|  | ) | ||||||
|  |  | ||||||
// ImageCrop describes a crop rectangle inside an image.
// All values are relative fractions (0..1) of the source image dimensions,
// not absolute pixel coordinates (see CropImage).
type ImageCrop struct { // all crop values are percentages!

	CropX      float64 `bson:"cropX"      json:"cropX"`      // left edge of the crop rectangle
	CropY      float64 `bson:"cropY"      json:"cropY"`      // top edge of the crop rectangle
	CropWidth  float64 `bson:"cropWidth"  json:"cropWidth"`  // width of the crop rectangle
	CropHeight float64 `bson:"cropHeight" json:"cropHeight"` // height of the crop rectangle
}
|  |  | ||||||
// ImageCompresson selects the output encoding (and quality / compression level)
// used by EncodeImage.
// NOTE(review): the type name is missing an 'i' ("Compresson" vs "Compression") -
// kept as-is, renaming would break the public API.
type ImageCompresson string //@enum:type

const (
	CompressionPNGNone  ImageCompresson = "PNG_NONE"  // PNG, stored uncompressed
	CompressionPNGSpeed ImageCompresson = "PNG_SPEED" // PNG, optimized for encoding speed
	CompressionPNGBest  ImageCompresson = "PNG_BEST"  // PNG, best (smallest) compression
	CompressionJPEG100  ImageCompresson = "JPEG_100"  // JPEG, quality 100
	CompressionJPEG90   ImageCompresson = "JPEG_090"  // JPEG, quality 90
	CompressionJPEG80   ImageCompresson = "JPEG_080"  // JPEG, quality 80
	CompressionJPEG70   ImageCompresson = "JPEG_070"  // JPEG, quality 70
	CompressionJPEG60   ImageCompresson = "JPEG_060"  // JPEG, quality 60
	CompressionJPEG50   ImageCompresson = "JPEG_050"  // JPEG, quality 50
	CompressionJPEG25   ImageCompresson = "JPEG_025"  // JPEG, quality 25
	CompressionJPEG10   ImageCompresson = "JPEG_010"  // JPEG, quality 10
	CompressionJPEG1    ImageCompresson = "JPEG_001"  // JPEG, quality 1
)
|  |  | ||||||
|  | func CropImage(img image.Image, px float64, py float64, pw float64, ph float64) (image.Image, error) { | ||||||
|  |  | ||||||
|  | 	type subImager interface { | ||||||
|  | 		SubImage(r image.Rectangle) image.Image | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	x := int(float64(img.Bounds().Dx()) * px) | ||||||
|  | 	y := int(float64(img.Bounds().Dy()) * py) | ||||||
|  | 	w := int(float64(img.Bounds().Dx()) * pw) | ||||||
|  | 	h := int(float64(img.Bounds().Dy()) * ph) | ||||||
|  |  | ||||||
|  | 	if simg, ok := img.(subImager); ok { | ||||||
|  |  | ||||||
|  | 		return simg.SubImage(image.Rect(x, y, x+w, y+h)), nil | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		bfr1 := bytes.Buffer{} | ||||||
|  | 		err := png.Encode(&bfr1, img) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		imgPNG, err := png.Decode(&bfr1) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		return imgPNG.(subImager).SubImage(image.Rect(x, y, w+w, y+h)), nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func EncodeImage(img image.Image, compression ImageCompresson) (bytes.Buffer, string, error) { | ||||||
|  | 	var err error | ||||||
|  |  | ||||||
|  | 	bfr := bytes.Buffer{} | ||||||
|  |  | ||||||
|  | 	switch compression { | ||||||
|  | 	case CompressionPNGNone: | ||||||
|  | 		enc := &png.Encoder{CompressionLevel: png.NoCompression} | ||||||
|  | 		err = enc.Encode(&bfr, img) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/png", nil | ||||||
|  | 	case CompressionPNGSpeed: | ||||||
|  | 		enc := &png.Encoder{CompressionLevel: png.BestSpeed} | ||||||
|  | 		err = enc.Encode(&bfr, img) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/png", nil | ||||||
|  | 	case CompressionPNGBest: | ||||||
|  | 		enc := &png.Encoder{CompressionLevel: png.BestCompression} | ||||||
|  | 		err = enc.Encode(&bfr, img) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/png", nil | ||||||
|  | 	case CompressionJPEG100: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 100}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG90: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 90}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG80: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 80}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG70: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 70}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG60: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 60}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG50: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 50}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG25: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 25}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG10: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 10}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG1: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 1}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	default: | ||||||
|  | 		return bytes.Buffer{}, "", exerr.New(exerr.TypeInternal, "unknown compression method: "+compression.String()).Build() | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
// ObjectFitImage scales img into a bounding box of size (bbw x bbh) according to
// the given ImageFit mode (semantics follow the CSS `object-fit` property).
// Empty space (possible in the CONTAIN_* modes) is filled with fillColor.
// The result is never larger than the bounding box; an unknown fit mode yields an error.
func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fillColor color.Color) (image.Image, error) {

	iw := img.Bounds().Size().X
	ih := img.Bounds().Size().Y

	// [iw, ih]   is the size of the image
	// [bbw, bbh] is the target bounding box,
	//             - it specifies the target ratio
	//             - and the maximal target resolution

	facW := float64(iw) / bbw
	facH := float64(ih) / bbh

	// facW is the ratio between iw and bbw
	//  - it is the factor by which the bounding box must be multiplied to reach the image size (in the x-axis)
	//
	// (same is true for facH, but for the height and y-axis)

	if fit == ImageFitCover {

		// image-fit:cover completely fills the target-bounding-box, it potentially cuts parts of the image away

		// we use the smaller (!) value of facW and facH, because we want to have the smallest possible destination rect (due to file size)
		// and because the image is made to completely fill the bounding-box, the smaller factor (= the dimension the image is stretched more) is relevant

		// but we cap `fac` at 1 (it can be larger than 1)
		// a value >1 would mean the final image resolution is bigger than the bounding box, which we do not want.

		// if the initial image (iw, ih) is already bigger than the bounding box (bbw, bbh), facW and facH are always >1 and fac will be 1
		// which means we will simply use the bounding box as destination rect (and scale the image down)

		fac := mathext.Clamp(mathext.Min(facW, facH), 0.0, 1.0)

		// we scale the bounding box by fac (both dimensions by the same amount, to keep the bounding-box ratio)

		w := int(math.Round(bbw * fac))
		h := int(math.Round(bbh * fac))

		// imaging.Fill crops and scales the image so that it covers (w x h) completely
		img = imaging.Fill(img, w, h, imaging.Center, imaging.Lanczos)

		newImg := image.NewRGBA(image.Rect(0, 0, w, h))

		// background first, then the (fully covering) image on top
		draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
		draw.Draw(newImg, newImg.Bounds(), img, image.Pt(0, 0), draw.Over)

		return newImg, nil
	}

	if fit == ImageFitContainCenter || fit == ImageFitContainTopLeft || fit == ImageFitContainTopRight || fit == ImageFitContainBottomLeft || fit == ImageFitContainBottomRight {

		// image-fit:contain fits the whole image into the target-bounding-box; there is potentially
		// empty space (filled with fillColor), but nothing of the image is cut away
		// (NOTE(review): the original comment said "cover" and "cuts parts away" here - this branch implements the contain modes)

		// we use the bigger (!) value of facW and facH,
		// because the image is made to fit the bounding-box, the bigger factor (= the dimension the image is stretched less) is relevant

		// but we cap `fac` at 1 (it can be larger than 1)
		// a value >1 would mean the final image resolution is bigger than the bounding box, which we do not want.

		// if the initial image (iw, ih) is already bigger than the bounding box (bbw, bbh), facW and facH are always >1 and fac will be 1
		// which means we will simply use the bounding box as destination rect (and scale the image down)

		facOut := mathext.Clamp(mathext.Max(facW, facH), 0.0, 1.0)

		// we scale the bounding box by fac (both dimensions by the same amount, to keep the bounding-box ratio)

		// [ow|oh] ==> size of output image (same ratio as bounding box [bbw|bbh])

		ow := int(math.Round(bbw * facOut))
		oh := int(math.Round(bbh * facOut))

		facScale := mathext.Min(float64(ow)/float64(iw), float64(oh)/float64(ih))

		// [dw|dh] ==> size of destination rect (where to draw source in output image) (same ratio as input image [iw|ih])

		dw := int(math.Round(float64(iw) * facScale))
		dh := int(math.Round(float64(ih) * facScale))

		img = imaging.Resize(img, dw, dh, imaging.Lanczos)

		// anchor the destination rect inside the output image according to the chosen contain mode
		var destBounds image.Rectangle
		if fit == ImageFitContainCenter {
			destBounds = image.Rect((ow-dw)/2, (oh-dh)/2, (ow-dw)/2+dw, (oh-dh)/2+dh)
		} else if fit == ImageFitContainTopLeft {
			destBounds = image.Rect(0, 0, dw, dh)
		} else if fit == ImageFitContainTopRight {
			destBounds = image.Rect(ow-dw, 0, ow, dh)
		} else if fit == ImageFitContainBottomLeft {
			destBounds = image.Rect(0, oh-dh, dw, oh)
		} else if fit == ImageFitContainBottomRight {
			destBounds = image.Rect(ow-dw, oh-dh, ow, oh)
		}

		newImg := image.NewRGBA(image.Rect(0, 0, ow, oh))

		// background first, then the resized image into its anchored destination rect
		draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
		draw.Draw(newImg, destBounds, img, image.Pt(0, 0), draw.Over)

		return newImg, nil
	}

	if fit == ImageFitStretch {

		// image-fit:stretch simply stretches the image to the bounding box (the aspect ratio is not preserved)

		// we use the bigger value of [facW;facH], to (potentially) scale the bounding box down before applying it
		// theoretically we could directly use [bbw, bbh] in the call to imaging.Resize,
		// but if the image is (a lot) smaller than the bounding box it is useful to scale it down to reduce final pdf filesize

		// we also cap fac at 1, because we never want the final rect to be bigger than the inputted bounding box (see comments at start of method)

		fac := mathext.Clamp(mathext.Max(facW, facH), 0.0, 1.0)

		// we scale the bounding box by fac (both dimensions by the same amount, to keep the bounding-box ratio)

		w := int(math.Round(bbw * fac))
		h := int(math.Round(bbh * fac))

		img = imaging.Resize(img, w, h, imaging.Lanczos)

		newImg := image.NewRGBA(image.Rect(0, 0, w, h))

		draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
		draw.Draw(newImg, newImg.Bounds(), img, image.Pt(0, 0), draw.Over)

		return newImg, nil
	}

	return nil, exerr.New(exerr.TypeInternal, fmt.Sprintf("unknown image-fit: '%s'", fit)).Build()
}
|  |  | ||||||
|  | func VerifyAndDecodeImage(data io.Reader, mime string) (image.Image, error) { | ||||||
|  |  | ||||||
|  | 	if mime == "image/jpeg" { | ||||||
|  | 		img, err := jpeg.Decode(data) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "failed to decode blob as jpeg").WithType(exerr.TypeInvalidImage).Build() | ||||||
|  | 		} | ||||||
|  | 		return img, nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if mime == "image/png" { | ||||||
|  | 		img, err := png.Decode(data) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "failed to decode blob as png").WithType(exerr.TypeInvalidImage).Build() | ||||||
|  | 		} | ||||||
|  | 		return img, nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil, exerr.New(exerr.TypeInvalidMimeType, fmt.Sprintf("unknown/invalid image mimetype: '%s'", mime)).Build() | ||||||
|  | } | ||||||
| @@ -59,6 +59,18 @@ func ArrUnique[T comparable](array []T) []T { | |||||||
| 	return result | 	return result | ||||||
| } | } | ||||||
|  |  | ||||||
// ArrUniqueStable returns a copy of array with duplicate values removed.
// Only the first occurrence of every value is kept, so the original order is preserved.
func ArrUniqueStable[T comparable](array []T) []T {
	seen := make(map[T]bool, len(array))
	out := make([]T, 0, len(array))
	for _, item := range array {
		if seen[item] {
			continue
		}
		seen[item] = true
		out = append(out, item)
	}
	return out
}
|  |  | ||||||
| func ArrEqualsExact[T comparable](arr1 []T, arr2 []T) bool { | func ArrEqualsExact[T comparable](arr1 []T, arr2 []T) bool { | ||||||
| 	if len(arr1) != len(arr2) { | 	if len(arr1) != len(arr2) { | ||||||
| 		return false | 		return false | ||||||
| @@ -453,6 +465,26 @@ func ArrConcat[T any](arr ...[]T) []T { | |||||||
| 	return r | 	return r | ||||||
| } | } | ||||||
|  |  | ||||||
// ArrAppend works similar to append(x, y, z) - but does not touch the old array and returns a new one.
// (fixes the "doe snot" typo in the original comment; the result slice is pre-sized
// instead of copying and then growing element by element)
func ArrAppend[T any](arr []T, add ...T) []T {
	r := make([]T, 0, len(arr)+len(add))
	r = append(r, arr...)
	r = append(r, add...)
	return r
}
|  |  | ||||||
// ArrPrepend works similar to append(x, y, z) - but does not touch the old array and returns a new one.
// Also - in contrast to ArrAppend - the add values are inserted at the start of the resulting array (in reverse order).
// (fixes the "doe snot" typo in the original comment)
func ArrPrepend[T any](arr []T, add ...T) []T {
	out := make([]T, len(arr)+len(add))
	copy(out[len(add):], arr)
	// add values go to the front, reversed: add[0] ends up directly before arr
	for i, v := range add {
		out[len(add)-i-1] = v
	}
	return out
}
|  |  | ||||||
| // ArrCopy does a shallow copy of the 'in' array | // ArrCopy does a shallow copy of the 'in' array | ||||||
| func ArrCopy[T any](in []T) []T { | func ArrCopy[T any](in []T) []T { | ||||||
| 	out := make([]T, len(in)) | 	out := make([]T, len(in)) | ||||||
| @@ -468,6 +500,10 @@ func ArrRemove[T comparable](arr []T, needle T) []T { | |||||||
| 	return arr | 	return arr | ||||||
| } | } | ||||||
|  |  | ||||||
// ArrRemoveAt returns a copy of arr with the element at index idx removed.
//
// Bugfix: the original `append(arr[:idx], arr[idx+1:]...)` writes into the
// caller's backing array, silently corrupting the input slice - inconsistent
// with ArrAppend / ArrPrepend in this package, which explicitly do not touch
// their input. This version allocates a fresh slice and leaves arr unchanged.
func ArrRemoveAt[T any](arr []T, idx int) []T {
	out := make([]T, 0, len(arr)-1)
	out = append(out, arr[:idx]...)
	out = append(out, arr[idx+1:]...)
	return out
}
|  |  | ||||||
| func ArrExcept[T comparable](arr []T, needles ...T) []T { | func ArrExcept[T comparable](arr []T, needles ...T) []T { | ||||||
| 	r := make([]T, 0, len(arr)) | 	r := make([]T, 0, len(arr)) | ||||||
| 	rmlist := ArrToSet(needles) | 	rmlist := ArrToSet(needles) | ||||||
|   | |||||||
| @@ -2,6 +2,7 @@ package langext | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"strings" | ||||||
| 	"testing" | 	"testing" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -10,3 +11,13 @@ func TestJoinString(t *testing.T) { | |||||||
| 	res := JoinString(ids, ",") | 	res := JoinString(ids, ",") | ||||||
| 	tst.AssertEqual(t, res, "1,2,3") | 	tst.AssertEqual(t, res, "1,2,3") | ||||||
| } | } | ||||||
|  |  | ||||||
// TestArrPrepend checks that ArrPrepend inserts the added values at the front of a
// NEW slice (in reverse argument order) and leaves the input slice untouched.
func TestArrPrepend(t *testing.T) {
	v1 := []string{"1", "2", "3"}

	v2 := ArrPrepend(v1, "4", "5", "6")

	// the input slice must not be modified
	tst.AssertEqual(t, strings.Join(v1, ""), "123")
	// added values appear reversed, followed by the original elements
	tst.AssertEqual(t, strings.Join(v2, ""), "654123")

}
|   | |||||||
| @@ -5,12 +5,76 @@ import ( | |||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func Coalesce[T any](v *T, def T) T { | func Coalesce[T any](v1 *T, def T) T { | ||||||
| 	if v == nil { | 	if v1 != nil { | ||||||
| 		return def | 		return *v1 | ||||||
| 	} else { |  | ||||||
| 		return *v |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	return def | ||||||
|  | } | ||||||
|  |  | ||||||
// CoalesceOpt returns v1 if it is non-nil, otherwise v2 (which may itself be nil).
func CoalesceOpt[T any](v1 *T, v2 *T) *T {
	if v1 == nil {
		return v2
	}
	return v1
}
|  |  | ||||||
// Coalesce3 returns the value behind the first non-nil pointer of [v1, v2],
// or def if both are nil.
func Coalesce3[T any](v1 *T, v2 *T, def T) T {
	for _, p := range []*T{v1, v2} {
		if p != nil {
			return *p
		}
	}
	return def
}
|  |  | ||||||
// Coalesce3Opt returns the first non-nil pointer of [v1, v2],
// or v3 (which may itself be nil) if both are nil.
func Coalesce3Opt[T any](v1 *T, v2 *T, v3 *T) *T {
	for _, p := range []*T{v1, v2} {
		if p != nil {
			return p
		}
	}
	return v3
}
|  |  | ||||||
// Coalesce4 returns the value behind the first non-nil pointer of [v1, v2, v3],
// or def if all three are nil.
func Coalesce4[T any](v1 *T, v2 *T, v3 *T, def T) T {
	for _, p := range []*T{v1, v2, v3} {
		if p != nil {
			return *p
		}
	}
	return def
}
|  |  | ||||||
// Coalesce4Opt returns the first non-nil pointer of [v1, v2, v3],
// or v4 (which may itself be nil) if all three are nil.
func Coalesce4Opt[T any](v1 *T, v2 *T, v3 *T, v4 *T) *T {
	for _, p := range []*T{v1, v2, v3} {
		if p != nil {
			return p
		}
	}
	return v4
}
|  |  | ||||||
| func CoalesceString(s *string, def string) string { | func CoalesceString(s *string, def string) string { | ||||||
|   | |||||||
| @@ -63,3 +63,51 @@ func PatchRemJson[JV string | []byte](rawjson JV, key string) (JV, error) { | |||||||
|  |  | ||||||
| 	return JV(newjson), nil | 	return JV(newjson), nil | ||||||
| } | } | ||||||
|  |  | ||||||
// MarshalJsonOrPanic serializes v to a JSON string and panics if marshalling fails.
func MarshalJsonOrPanic(v any) string {
	bin, err := json.Marshal(v)
	if err == nil {
		return string(bin)
	}
	panic(err)
}
|  |  | ||||||
// MarshalJsonOrDefault serializes v to a JSON string; if marshalling fails
// the supplied default is returned instead.
func MarshalJsonOrDefault(v any, def string) string {
	if bin, err := json.Marshal(v); err == nil {
		return string(bin)
	}
	return def
}
|  |  | ||||||
|  | func MarshalJsonOrNil(v any) *string { | ||||||
|  | 	bin, err := json.Marshal(v) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	return Ptr(string(bin)) | ||||||
|  | } | ||||||
|  |  | ||||||
// MarshalJsonIndentOrPanic serializes v to an indented JSON string
// (see json.MarshalIndent) and panics if marshalling fails.
func MarshalJsonIndentOrPanic(v any, prefix, indent string) string {
	bin, err := json.MarshalIndent(v, prefix, indent)
	if err == nil {
		return string(bin)
	}
	panic(err)
}
|  |  | ||||||
// MarshalJsonIndentOrDefault serializes v to an indented JSON string
// (see json.MarshalIndent); if marshalling fails the supplied default is returned.
func MarshalJsonIndentOrDefault(v any, prefix, indent string, def string) string {
	if bin, err := json.MarshalIndent(v, prefix, indent); err == nil {
		return string(bin)
	}
	return def
}
|  |  | ||||||
|  | func MarshalJsonIndentOrNil(v any, prefix, indent string) *string { | ||||||
|  | 	bin, err := json.MarshalIndent(v, prefix, indent) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	return Ptr(string(bin)) | ||||||
|  | } | ||||||
|   | |||||||
							
								
								
									
										19
									
								
								langext/object.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										19
									
								
								langext/object.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,19 @@ | |||||||
|  | package langext | ||||||
|  |  | ||||||
|  | import "encoding/json" | ||||||
|  |  | ||||||
// DeepCopyByJson creates a deep copy of v by round-tripping it through
// encoding/json. Only data that survives JSON serialization is copied
// (exported fields; no funcs, channels, unexported state, etc.).
// On marshal/unmarshal failure the zero value and the error are returned.
func DeepCopyByJson[T any](v T) (T, error) {
	var zero T

	bin, err := json.Marshal(v)
	if err != nil {
		return zero, err
	}

	var out T
	if err := json.Unmarshal(bin, &out); err != nil {
		return zero, err
	}

	return out, nil
}
| @@ -27,6 +27,10 @@ func DblPtrNil[T any]() **T { | |||||||
| 	return &v | 	return &v | ||||||
| } | } | ||||||
|  |  | ||||||
// ArrPtr returns a pointer to a (newly allocated) slice containing the supplied
// values - useful to build *[]T values inline, e.g. ArrPtr(1, 2, 3).
func ArrPtr[T any](v ...T) *[]T {
	return &v
}
|  |  | ||||||
| func PtrInt32(v int32) *int32 { | func PtrInt32(v int32) *int32 { | ||||||
| 	return &v | 	return &v | ||||||
| } | } | ||||||
|   | |||||||
| @@ -5,6 +5,7 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/primitive" | 	"go.mongodb.org/mongo-driver/bson/primitive" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | ||||||
| 	"reflect" | 	"reflect" | ||||||
| @@ -34,6 +35,15 @@ func CreateGoExtBsonRegistry() *bsoncodec.Registry { | |||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.SecondsF64(0)), rfctime.SecondsF64(0)) | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.SecondsF64(0)), rfctime.SecondsF64(0)) | ||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(rfctime.SecondsF64(0))), rfctime.SecondsF64(0)) | 	rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(rfctime.SecondsF64(0))), rfctime.SecondsF64(0)) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(exerr.ErrorCategory{}), exerr.ErrorCategory{}) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(exerr.ErrorCategory{})), exerr.ErrorCategory{}) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(exerr.ErrorSeverity{}), exerr.ErrorSeverity{}) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(exerr.ErrorSeverity{})), exerr.ErrorSeverity{}) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(exerr.ErrorType{}), exerr.ErrorType{}) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(exerr.ErrorType{})), exerr.ErrorType{}) | ||||||
|  |  | ||||||
| 	bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb) | 	bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb) | ||||||
| 	bsoncodec.DefaultValueDecoders{}.RegisterDefaultDecoders(rb) | 	bsoncodec.DefaultValueDecoders{}.RegisterDefaultDecoders(rb) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,19 +1,45 @@ | |||||||
| package reflectext | package reflectext | ||||||
|  |  | ||||||
| import "reflect" | import ( | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"reflect" | ||||||
|  | ) | ||||||
|  |  | ||||||
| func ConvertStructToMap(v any) any { | type ConvertStructToMapOpt struct { | ||||||
| 	return reflectToMap(reflect.ValueOf(v)) | 	KeepJsonMarshalTypes bool | ||||||
|  | 	MaxDepth             *int | ||||||
| } | } | ||||||
|  |  | ||||||
| func reflectToMap(fv reflect.Value) any { | func ConvertStructToMap(v any, opts ...ConvertStructToMapOpt) map[string]any { | ||||||
|  | 	opt := ConvertStructToMapOpt{} | ||||||
|  | 	if len(opts) > 0 { | ||||||
|  | 		opt = opts[0] | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	res := reflectToMap(reflect.ValueOf(v), 1, opt) | ||||||
|  |  | ||||||
|  | 	if v, ok := res.(map[string]any); ok { | ||||||
|  | 		return v | ||||||
|  | 	} else if langext.IsNil(res) { | ||||||
|  | 		return nil | ||||||
|  | 	} else { | ||||||
|  | 		panic("not an object") | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func reflectToMap(fv reflect.Value, depth int, opt ConvertStructToMapOpt) any { | ||||||
|  |  | ||||||
|  | 	if opt.MaxDepth != nil && depth > *opt.MaxDepth { | ||||||
|  | 		return fv.Interface() | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	if fv.Kind() == reflect.Ptr { | 	if fv.Kind() == reflect.Ptr { | ||||||
|  |  | ||||||
| 		if fv.IsNil() { | 		if fv.IsNil() { | ||||||
| 			return nil | 			return nil | ||||||
| 		} else { | 		} else { | ||||||
| 			return reflectToMap(fv.Elem()) | 			return reflectToMap(fv.Elem(), depth, opt) | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 	} | 	} | ||||||
| @@ -30,7 +56,7 @@ func reflectToMap(fv reflect.Value) any { | |||||||
| 		arrlen := fv.Len() | 		arrlen := fv.Len() | ||||||
| 		arr := make([]any, arrlen) | 		arr := make([]any, arrlen) | ||||||
| 		for i := 0; i < arrlen; i++ { | 		for i := 0; i < arrlen; i++ { | ||||||
| 			arr[i] = reflectToMap(fv.Index(i)) | 			arr[i] = reflectToMap(fv.Index(i), depth+1, opt) | ||||||
| 		} | 		} | ||||||
| 		return arr | 		return arr | ||||||
|  |  | ||||||
| @@ -41,7 +67,7 @@ func reflectToMap(fv reflect.Value) any { | |||||||
| 		arrlen := fv.Len() | 		arrlen := fv.Len() | ||||||
| 		arr := make([]any, arrlen) | 		arr := make([]any, arrlen) | ||||||
| 		for i := 0; i < arrlen; i++ { | 		for i := 0; i < arrlen; i++ { | ||||||
| 			arr[i] = reflectToMap(fv.Index(i)) | 			arr[i] = reflectToMap(fv.Index(i), depth+1, opt) | ||||||
| 		} | 		} | ||||||
| 		return arr | 		return arr | ||||||
|  |  | ||||||
| @@ -56,11 +82,15 @@ func reflectToMap(fv reflect.Value) any { | |||||||
|  |  | ||||||
| 	if fv.Kind() == reflect.Struct { | 	if fv.Kind() == reflect.Struct { | ||||||
|  |  | ||||||
|  | 		if opt.KeepJsonMarshalTypes && fv.Type().Implements(reflect.TypeFor[json.Marshaler]()) { | ||||||
|  | 			return fv.Interface() | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		res := make(map[string]any) | 		res := make(map[string]any) | ||||||
|  |  | ||||||
| 		for i := 0; i < fv.NumField(); i++ { | 		for i := 0; i < fv.NumField(); i++ { | ||||||
| 			if fv.Type().Field(i).IsExported() { | 			if fv.Type().Field(i).IsExported() { | ||||||
| 				res[fv.Type().Field(i).Name] = reflectToMap(fv.Field(i)) | 				res[fv.Type().Field(i).Name] = reflectToMap(fv.Field(i), depth+1, opt) | ||||||
| 			} | 			} | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										42
									
								
								reflectext/convertToMap_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										42
									
								
								reflectext/convertToMap_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,42 @@ | |||||||
|  | package reflectext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"testing" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
// TestConvertStructToMap smoke-tests ConvertStructToMap with a struct mixing plain
// values, a json.Marshaler field (time.Time, kept as-is via KeepJsonMarshalTypes),
// a slice, and single/double pointers in nil and non-nil variants.
// NOTE(review): this only prints the result - there are no assertions on the output.
func TestConvertStructToMap(t *testing.T) {

	type tst struct {
		FieldA  int
		FieldB  string
		FieldC  time.Time
		FieldD  []float64
		FieldE1 *int
		FieldE2 **int
		FieldE3 *int
		FieldE4 **int
		FieldE5 *int
		FieldE6 **int
	}

	value := tst{
		FieldA:  123,
		FieldB:  "hello",
		FieldC:  time.Date(2020, 05, 12, 8, 30, 0, 0, time.UTC),
		FieldD:  []float64{1, 2, 3, 4, 5, 6, 7},
		FieldE1: nil,
		FieldE2: nil,
		FieldE3: langext.Ptr(12),
		FieldE4: langext.DblPtr(12),
		FieldE5: nil,
		FieldE6: langext.DblPtrNil[int](),
	}

	valueOut := ConvertStructToMap(value, ConvertStructToMapOpt{KeepJsonMarshalTypes: true})

	fmt.Printf("%+v\n", valueOut)

}
| @@ -47,9 +47,3 @@ func TestGetMapField(t *testing.T) { | |||||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test")), "12 true") | 	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test")), "12 true") | ||||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test2")), "0 false") | 	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test2")), "0 false") | ||||||
| } | } | ||||||
|  |  | ||||||
| func main2() { |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func main() { |  | ||||||
| } |  | ||||||
|   | |||||||
| @@ -9,6 +9,8 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/bson/bsonrw" | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
| 	"reflect" | 	"reflect" | ||||||
|  | 	"strconv" | ||||||
|  | 	"strings" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -65,36 +67,20 @@ func (t *Date) UnmarshalJSON(data []byte) error { | |||||||
| 	if err := json.Unmarshal(data, &str); err != nil { | 	if err := json.Unmarshal(data, &str); err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
| 	t0, err := time.Parse(t.FormatStr(), str) | 	return t.ParseString(str) | ||||||
| 	if err != nil { |  | ||||||
| 		return err |  | ||||||
| 	} |  | ||||||
| 	t.Year = t0.Year() |  | ||||||
| 	t.Month = int(t0.Month()) |  | ||||||
| 	t.Day = t0.Day() |  | ||||||
| 	return nil |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func (t Date) MarshalJSON() ([]byte, error) { | func (t Date) MarshalJSON() ([]byte, error) { | ||||||
| 	str := t.TimeUTC().Format(t.FormatStr()) | 	str := t.String() | ||||||
| 	return json.Marshal(str) | 	return json.Marshal(str) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (t Date) MarshalText() ([]byte, error) { | func (t Date) MarshalText() ([]byte, error) { | ||||||
| 	b := make([]byte, 0, len(t.FormatStr())) | 	return []byte(t.String()), nil | ||||||
| 	return t.TimeUTC().AppendFormat(b, t.FormatStr()), nil |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func (t *Date) UnmarshalText(data []byte) error { | func (t *Date) UnmarshalText(data []byte) error { | ||||||
| 	var err error | 	return t.ParseString(string(data)) | ||||||
| 	v, err := time.Parse(t.FormatStr(), string(data)) |  | ||||||
| 	if err != nil { |  | ||||||
| 		return err |  | ||||||
| 	} |  | ||||||
| 	t.Year = v.Year() |  | ||||||
| 	t.Month = int(v.Month()) |  | ||||||
| 	t.Day = v.Day() |  | ||||||
| 	return nil |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func (t *Date) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | func (t *Date) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
| @@ -116,6 +102,13 @@ func (t *Date) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | |||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	if tt == "" { | ||||||
|  | 		t.Year = 0 | ||||||
|  | 		t.Month = 0 | ||||||
|  | 		t.Day = 0 | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	v, err := time.Parse(t.FormatStr(), tt) | 	v, err := time.Parse(t.FormatStr(), tt) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| @@ -128,7 +121,10 @@ func (t *Date) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (t Date) MarshalBSONValue() (bsontype.Type, []byte, error) { | func (t Date) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
| 	return bson.MarshalValue(t.TimeUTC().Format(t.FormatStr())) | 	if t.IsZero() { | ||||||
|  | 		return bson.MarshalValue("") | ||||||
|  | 	} | ||||||
|  | 	return bson.MarshalValue(t.String()) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (t Date) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { | func (t Date) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { | ||||||
| @@ -164,7 +160,7 @@ func (t Date) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val | |||||||
| } | } | ||||||
|  |  | ||||||
| func (t Date) Serialize() string { | func (t Date) Serialize() string { | ||||||
| 	return t.TimeUTC().Format(t.FormatStr()) | 	return t.String() | ||||||
| } | } | ||||||
|  |  | ||||||
| func (t Date) FormatStr() string { | func (t Date) FormatStr() string { | ||||||
| @@ -212,11 +208,52 @@ func (t Date) Format(layout string) string { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (t Date) GoString() string { | func (t Date) GoString() string { | ||||||
| 	return t.TimeUTC().GoString() | 	return fmt.Sprintf("rfctime.Date{Year: %d, Month: %d, Day: %d}", t.Year, t.Month, t.Day) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (t Date) String() string { | func (t Date) String() string { | ||||||
| 	return t.TimeUTC().String() | 	return fmt.Sprintf("%04d-%02d-%02d", t.Year, t.Month, t.Day) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t *Date) ParseString(v string) error { | ||||||
|  | 	split := strings.Split(v, "-") | ||||||
|  | 	if len(split) != 3 { | ||||||
|  | 		return errors.New("invalid date format: " + v) | ||||||
|  | 	} | ||||||
|  | 	year, err := strconv.ParseInt(split[0], 10, 32) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errors.New("invalid date format: " + v + ": " + err.Error()) | ||||||
|  | 	} | ||||||
|  | 	month, err := strconv.ParseInt(split[1], 10, 32) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errors.New("invalid date format: " + v + ": " + err.Error()) | ||||||
|  | 	} | ||||||
|  | 	day, err := strconv.ParseInt(split[2], 10, 32) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errors.New("invalid date format: " + v + ": " + err.Error()) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if year < 0 { | ||||||
|  | 		return errors.New("invalid date format: " + v + ": year is negative") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if month < 1 || month > 12 { | ||||||
|  | 		return errors.New("invalid date format: " + v + ": month is out of range") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if day < 1 || day > 31 { | ||||||
|  | 		return errors.New("invalid date format: " + v + ": day is out of range") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	t.Year = int(year) | ||||||
|  | 	t.Month = int(month) | ||||||
|  | 	t.Day = int(day) | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t Date) IsZero() bool { | ||||||
|  | 	return t.Year == 0 && t.Month == 0 && t.Day == 0 | ||||||
| } | } | ||||||
|  |  | ||||||
| func NewDate(t time.Time) Date { | func NewDate(t time.Time) Date { | ||||||
|   | |||||||
| @@ -8,6 +8,7 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsonrw" | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"reflect" | 	"reflect" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -245,6 +246,13 @@ func NewRFC3339(t time.Time) RFC3339Time { | |||||||
| 	return RFC3339Time(t) | 	return RFC3339Time(t) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func NewRFC3339Ptr(t *time.Time) *RFC3339Time { | ||||||
|  | 	if t == nil { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	return langext.Ptr(RFC3339Time(*t)) | ||||||
|  | } | ||||||
|  |  | ||||||
| func NowRFC3339() RFC3339Time { | func NowRFC3339() RFC3339Time { | ||||||
| 	return RFC3339Time(time.Now()) | 	return RFC3339Time(time.Now()) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -8,6 +8,7 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsonrw" | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"reflect" | 	"reflect" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -245,6 +246,13 @@ func NewRFC3339Nano(t time.Time) RFC3339NanoTime { | |||||||
| 	return RFC3339NanoTime(t) | 	return RFC3339NanoTime(t) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func NewRFC3339NanoPtr(t *time.Time) *RFC3339NanoTime { | ||||||
|  | 	if t == nil { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	return langext.Ptr(RFC3339NanoTime(*t)) | ||||||
|  | } | ||||||
|  |  | ||||||
| func NowRFC3339Nano() RFC3339NanoTime { | func NowRFC3339Nano() RFC3339NanoTime { | ||||||
| 	return RFC3339NanoTime(time.Now()) | 	return RFC3339NanoTime(time.Now()) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -8,6 +8,7 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsonrw" | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"reflect" | 	"reflect" | ||||||
| 	"strconv" | 	"strconv" | ||||||
| 	"time" | 	"time" | ||||||
| @@ -239,6 +240,13 @@ func NewUnix(t time.Time) UnixTime { | |||||||
| 	return UnixTime(t) | 	return UnixTime(t) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func NewUnixPtr(t *time.Time) *UnixTime { | ||||||
|  | 	if t == nil { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	return langext.Ptr(UnixTime(*t)) | ||||||
|  | } | ||||||
|  |  | ||||||
| func NowUnix() UnixTime { | func NowUnix() UnixTime { | ||||||
| 	return UnixTime(time.Now()) | 	return UnixTime(time.Now()) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -8,6 +8,7 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsonrw" | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"reflect" | 	"reflect" | ||||||
| 	"strconv" | 	"strconv" | ||||||
| 	"time" | 	"time" | ||||||
| @@ -239,6 +240,13 @@ func NewUnixMilli(t time.Time) UnixMilliTime { | |||||||
| 	return UnixMilliTime(t) | 	return UnixMilliTime(t) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func NewUnixMilliPtr(t *time.Time) *UnixMilliTime { | ||||||
|  | 	if t == nil { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	return langext.Ptr(UnixMilliTime(*t)) | ||||||
|  | } | ||||||
|  |  | ||||||
| func NowUnixMilli() UnixMilliTime { | func NowUnixMilli() UnixMilliTime { | ||||||
| 	return UnixMilliTime(time.Now()) | 	return UnixMilliTime(time.Now()) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -8,6 +8,7 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsonrw" | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"reflect" | 	"reflect" | ||||||
| 	"strconv" | 	"strconv" | ||||||
| 	"time" | 	"time" | ||||||
| @@ -239,6 +240,13 @@ func NewUnixNano(t time.Time) UnixNanoTime { | |||||||
| 	return UnixNanoTime(t) | 	return UnixNanoTime(t) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func NewUnixNanoPtr(t *time.Time) *UnixNanoTime { | ||||||
|  | 	if t == nil { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	return langext.Ptr(UnixNanoTime(*t)) | ||||||
|  | } | ||||||
|  |  | ||||||
| func NowUnixNano() UnixNanoTime { | func NowUnixNano() UnixNanoTime { | ||||||
| 	return UnixNanoTime(time.Now()) | 	return UnixNanoTime(time.Now()) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -7,8 +7,6 @@ import ( | |||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/timeext" | 	"gogs.mikescher.com/BlackForestBytes/goext/timeext" | ||||||
| 	"strconv" |  | ||||||
| 	"strings" |  | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -79,24 +77,12 @@ var ConverterRFC339NanoTimeToString = NewDBTypeConverter[rfctime.RFC3339NanoTime | |||||||
| var ConverterRFCDateToString = NewDBTypeConverter[rfctime.Date, string](func(v rfctime.Date) (string, error) { | var ConverterRFCDateToString = NewDBTypeConverter[rfctime.Date, string](func(v rfctime.Date) (string, error) { | ||||||
| 	return fmt.Sprintf("%04d-%02d-%02d", v.Year, v.Month, v.Day), nil | 	return fmt.Sprintf("%04d-%02d-%02d", v.Year, v.Month, v.Day), nil | ||||||
| }, func(v string) (rfctime.Date, error) { | }, func(v string) (rfctime.Date, error) { | ||||||
| 	split := strings.Split(v, "-") | 	d := rfctime.Date{} | ||||||
| 	if len(split) != 3 { | 	if err := d.ParseString(v); err != nil { | ||||||
| 		return rfctime.Date{}, errors.New("invalid date format: " + v) | 		return rfctime.Date{}, err | ||||||
|  | 	} else { | ||||||
|  | 		return d, nil | ||||||
| 	} | 	} | ||||||
| 	year, err := strconv.ParseInt(split[0], 10, 32) |  | ||||||
| 	if err != nil { |  | ||||||
| 		return rfctime.Date{}, errors.New("invalid date format: " + v + ": " + err.Error()) |  | ||||||
| 	} |  | ||||||
| 	month, err := strconv.ParseInt(split[0], 10, 32) |  | ||||||
| 	if err != nil { |  | ||||||
| 		return rfctime.Date{}, errors.New("invalid date format: " + v + ": " + err.Error()) |  | ||||||
| 	} |  | ||||||
| 	day, err := strconv.ParseInt(split[0], 10, 32) |  | ||||||
| 	if err != nil { |  | ||||||
| 		return rfctime.Date{}, errors.New("invalid date format: " + v + ": " + err.Error()) |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return rfctime.Date{Year: int(year), Month: int(month), Day: int(day)}, nil |  | ||||||
| }) | }) | ||||||
|  |  | ||||||
| var ConverterRFCTimeToString = NewDBTypeConverter[rfctime.Time, string](func(v rfctime.Time) (string, error) { | var ConverterRFCTimeToString = NewDBTypeConverter[rfctime.Time, string](func(v rfctime.Time) (string, error) { | ||||||
|   | |||||||
| @@ -47,3 +47,10 @@ func NewSimplePaginateFilter(filterClause string, filterParams PP, sort []Filter | |||||||
| 		}, | 		}, | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func NewEmptyPaginateFilter() PaginateFilter { | ||||||
|  | 	return genericPaginateFilter{ | ||||||
|  | 		sql:  func(params PP) (string, string, []string) { return "1=1", "", nil }, | ||||||
|  | 		sort: func() []FilterSort { return make([]FilterSort, 0) }, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|   | |||||||
							
								
								
									
										48
									
								
								sq/list.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										48
									
								
								sq/list.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,48 @@ | |||||||
|  | package sq | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func Iterate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int, consumer func(ctx context.Context, v TData) error) (int, error) { | ||||||
|  | 	if filter == nil { | ||||||
|  | 		filter = NewEmptyPaginateFilter() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	prepParams := PP{} | ||||||
|  |  | ||||||
|  | 	sortOrder := filter.Sort() | ||||||
|  | 	sortCond := "" | ||||||
|  | 	if len(sortOrder) > 0 { | ||||||
|  | 		sortCond = "ORDER BY " | ||||||
|  | 		for i, v := range sortOrder { | ||||||
|  | 			if i > 0 { | ||||||
|  | 				sortCond += ", " | ||||||
|  | 			} | ||||||
|  | 			sortCond += v.Field + " " + string(v.Direction) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pageCond := "" | ||||||
|  | 	if limit != nil { | ||||||
|  | 		pageCond += fmt.Sprintf("LIMIT :%s OFFSET :%s", prepParams.Add(*limit+1), prepParams.Add(*limit*(page-1))) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	filterCond, joinCond, joinTables := filter.SQL(prepParams) | ||||||
|  |  | ||||||
|  | 	selectCond := table + ".*" | ||||||
|  | 	for _, v := range joinTables { | ||||||
|  | 		selectCond += ", " + v + ".*" | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	sqlQueryData := "SELECT " + selectCond + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " ) " + sortCond + " " + pageCond | ||||||
|  |  | ||||||
|  | 	rows, err := q.Query(ctx, sqlQueryData, prepParams) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return 0, exerr.Wrap(err, "failed to list paginated entries from DB").Str("table", table).Any("filter", filter).Int("page", page).Any("limit", limit).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return IterateAll[TData](ctx, q, rows, scanMode, scanSec, true, consumer) | ||||||
|  | } | ||||||
| @@ -34,7 +34,7 @@ type genListener struct { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (g genListener) PrePing(ctx context.Context) error { | func (g genListener) PrePing(ctx context.Context) error { | ||||||
| 	if g.prePing == nil { | 	if g.prePing != nil { | ||||||
| 		return g.prePing(ctx) | 		return g.prePing(ctx) | ||||||
| 	} else { | 	} else { | ||||||
| 		return nil | 		return nil | ||||||
| @@ -42,7 +42,7 @@ func (g genListener) PrePing(ctx context.Context) error { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (g genListener) PreTxBegin(ctx context.Context, txid uint16) error { | func (g genListener) PreTxBegin(ctx context.Context, txid uint16) error { | ||||||
| 	if g.preTxBegin == nil { | 	if g.preTxBegin != nil { | ||||||
| 		return g.preTxBegin(ctx, txid) | 		return g.preTxBegin(ctx, txid) | ||||||
| 	} else { | 	} else { | ||||||
| 		return nil | 		return nil | ||||||
| @@ -50,7 +50,7 @@ func (g genListener) PreTxBegin(ctx context.Context, txid uint16) error { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (g genListener) PreTxCommit(txid uint16) error { | func (g genListener) PreTxCommit(txid uint16) error { | ||||||
| 	if g.preTxCommit == nil { | 	if g.preTxCommit != nil { | ||||||
| 		return g.preTxCommit(txid) | 		return g.preTxCommit(txid) | ||||||
| 	} else { | 	} else { | ||||||
| 		return nil | 		return nil | ||||||
| @@ -58,7 +58,7 @@ func (g genListener) PreTxCommit(txid uint16) error { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (g genListener) PreTxRollback(txid uint16) error { | func (g genListener) PreTxRollback(txid uint16) error { | ||||||
| 	if g.preTxRollback == nil { | 	if g.preTxRollback != nil { | ||||||
| 		return g.preTxRollback(txid) | 		return g.preTxRollback(txid) | ||||||
| 	} else { | 	} else { | ||||||
| 		return nil | 		return nil | ||||||
| @@ -66,7 +66,7 @@ func (g genListener) PreTxRollback(txid uint16) error { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (g genListener) PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP) error { | func (g genListener) PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP) error { | ||||||
| 	if g.preQuery == nil { | 	if g.preQuery != nil { | ||||||
| 		return g.preQuery(ctx, txID, sql, params) | 		return g.preQuery(ctx, txID, sql, params) | ||||||
| 	} else { | 	} else { | ||||||
| 		return nil | 		return nil | ||||||
| @@ -74,7 +74,7 @@ func (g genListener) PreQuery(ctx context.Context, txID *uint16, sql *string, pa | |||||||
| } | } | ||||||
|  |  | ||||||
| func (g genListener) PreExec(ctx context.Context, txID *uint16, sql *string, params *PP) error { | func (g genListener) PreExec(ctx context.Context, txID *uint16, sql *string, params *PP) error { | ||||||
| 	if g.preExec == nil { | 	if g.preExec != nil { | ||||||
| 		return g.preExec(ctx, txID, sql, params) | 		return g.preExec(ctx, txID, sql, params) | ||||||
| 	} else { | 	} else { | ||||||
| 		return nil | 		return nil | ||||||
|   | |||||||
| @@ -9,6 +9,10 @@ import ( | |||||||
| ) | ) | ||||||
|  |  | ||||||
| func Paginate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int) ([]TData, pag.Pagination, error) { | func Paginate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||||
|  | 	if filter == nil { | ||||||
|  | 		filter = NewEmptyPaginateFilter() | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	prepParams := PP{} | 	prepParams := PP{} | ||||||
|  |  | ||||||
| 	sortOrder := filter.Sort() | 	sortOrder := filter.Sort() | ||||||
| @@ -90,6 +94,10 @@ func Paginate[TData any](ctx context.Context, q Queryable, table string, filter | |||||||
| } | } | ||||||
|  |  | ||||||
| func Count(ctx context.Context, q Queryable, table string, filter PaginateFilter) (int, error) { | func Count(ctx context.Context, q Queryable, table string, filter PaginateFilter) (int, error) { | ||||||
|  | 	if filter == nil { | ||||||
|  | 		filter = NewEmptyPaginateFilter() | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	prepParams := PP{} | 	prepParams := PP{} | ||||||
|  |  | ||||||
| 	filterCond, joinCond, _ := filter.SQL(prepParams) | 	filterCond, joinCond, _ := filter.SQL(prepParams) | ||||||
|   | |||||||
| @@ -333,3 +333,79 @@ func ScanAll[TData any](ctx context.Context, q Queryable, rows *sqlx.Rows, mode | |||||||
| 	} | 	} | ||||||
| 	return res, nil | 	return res, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func IterateAll[TData any](ctx context.Context, q Queryable, rows *sqlx.Rows, mode StructScanMode, sec StructScanSafety, close bool, consumer func(ctx context.Context, v TData) error) (int, error) { | ||||||
|  | 	var strscan *StructScanner | ||||||
|  |  | ||||||
|  | 	if sec == Safe { | ||||||
|  | 		strscan = NewStructScanner(rows, false) | ||||||
|  | 		var data TData | ||||||
|  | 		err := strscan.Start(&data) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return 0, err | ||||||
|  | 		} | ||||||
|  | 	} else if sec == Unsafe { | ||||||
|  | 		strscan = NewStructScanner(rows, true) | ||||||
|  | 		var data TData | ||||||
|  | 		err := strscan.Start(&data) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return 0, err | ||||||
|  | 		} | ||||||
|  | 	} else { | ||||||
|  | 		return 0, errors.New("unknown value for <sec>") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	rcount := 0 | ||||||
|  |  | ||||||
|  | 	for rows.Next() { | ||||||
|  |  | ||||||
|  | 		if err := ctx.Err(); err != nil { | ||||||
|  | 			return rcount, err | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if mode == SModeFast { | ||||||
|  | 			var data TData | ||||||
|  | 			err := strscan.StructScanBase(&data) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return rcount, err | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			err = consumer(ctx, data) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return rcount, exerr.Wrap(err, "").Build() | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			rcount++ | ||||||
|  |  | ||||||
|  | 		} else if mode == SModeExtended { | ||||||
|  | 			var data TData | ||||||
|  | 			err := strscan.StructScanExt(q, &data) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return rcount, err | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			err = consumer(ctx, data) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return rcount, exerr.Wrap(err, "").Build() | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			rcount++ | ||||||
|  |  | ||||||
|  | 		} else { | ||||||
|  | 			return rcount, errors.New("unknown value for <mode>") | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if close { | ||||||
|  | 		err := strscan.rows.Close() | ||||||
|  | 		if err != nil { | ||||||
|  | 			return rcount, err | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if err := rows.Err(); err != nil { | ||||||
|  | 		return rcount, err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return rcount, nil | ||||||
|  | } | ||||||
|   | |||||||
| @@ -146,3 +146,37 @@ func UnixFloatSeconds(v float64) time.Time { | |||||||
| func FloorTime(t time.Time) time.Time { | func FloorTime(t time.Time) time.Time { | ||||||
| 	return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location()) | 	return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location()) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func SubtractYears(t time.Time, yearCount float64, tz *time.Location) time.Time { | ||||||
|  | 	t = t.In(tz) | ||||||
|  |  | ||||||
|  | 	if yearCount < 0 { | ||||||
|  | 		return AddYears(t, -yearCount, tz) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	intCount, floatCount := math.Modf(yearCount) | ||||||
|  |  | ||||||
|  | 	t = t.AddDate(-int(intCount), 0, 0) | ||||||
|  |  | ||||||
|  | 	t0 := TimeToYearStart(t, tz) | ||||||
|  | 	t1 := TimeToYearEnd(t, tz) | ||||||
|  |  | ||||||
|  | 	return t.Add(time.Duration(float64(t1.Sub(t0)) * floatCount * -1)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func AddYears(t time.Time, yearCount float64, tz *time.Location) time.Time { | ||||||
|  | 	t = t.In(tz) | ||||||
|  |  | ||||||
|  | 	if yearCount < 0 { | ||||||
|  | 		return SubtractYears(t, -yearCount, tz) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	intCount, floatCount := math.Modf(yearCount) | ||||||
|  |  | ||||||
|  | 	t = t.AddDate(int(intCount), 0, 0) | ||||||
|  |  | ||||||
|  | 	t0 := TimeToYearStart(t, tz) | ||||||
|  | 	t1 := TimeToYearEnd(t, tz) | ||||||
|  |  | ||||||
|  | 	return t.Add(time.Duration(float64(t1.Sub(t0)) * floatCount)) | ||||||
|  | } | ||||||
|   | |||||||
							
								
								
									
										158
									
								
								timeext/time_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										158
									
								
								timeext/time_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,158 @@ | |||||||
|  | package timeext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"testing" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func TestTimeToDayStart(t *testing.T) { | ||||||
|  | 	tz := TimezoneBerlin | ||||||
|  | 	tm := time.Date(2022, 1, 1, 13, 14, 15, 0, tz) | ||||||
|  | 	expected := time.Date(2022, 1, 1, 0, 0, 0, 0, tz) | ||||||
|  | 	result := TimeToDayStart(tm, tz) | ||||||
|  | 	if !result.Equal(expected) { | ||||||
|  | 		t.Errorf("Expected %v but got %v", expected, result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestTimeToDayEnd(t *testing.T) { | ||||||
|  | 	tz := TimezoneBerlin | ||||||
|  | 	tm := time.Date(2022, 1, 1, 13, 14, 15, 0, tz) | ||||||
|  | 	expected := time.Date(2022, 1, 2, 0, 0, 0, 0, tz).Add(-1) | ||||||
|  | 	result := TimeToDayEnd(tm, tz) | ||||||
|  | 	if !result.Equal(expected) { | ||||||
|  | 		t.Errorf("Expected %v but got %v", expected, result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestIsSameDayIncludingDateBoundaries(t *testing.T) { | ||||||
|  | 	tz := TimezoneBerlin | ||||||
|  | 	t1 := time.Date(2022, 1, 1, 23, 59, 59, 0, tz) | ||||||
|  | 	t2 := time.Date(2022, 1, 2, 0, 0, 0, 0, tz) | ||||||
|  | 	if !IsSameDayIncludingDateBoundaries(t1, t2, tz) { | ||||||
|  | 		t.Errorf("Expected %v and %v to be the same day", t1, t2) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestIsDatePartEqual(t *testing.T) { | ||||||
|  | 	tz := TimezoneBerlin | ||||||
|  | 	t1 := time.Date(2022, 1, 1, 23, 59, 59, 0, tz) | ||||||
|  | 	t2 := time.Date(2022, 1, 1, 0, 0, 0, 0, tz) | ||||||
|  | 	if !IsDatePartEqual(t1, t2, tz) { | ||||||
|  | 		t.Errorf("Expected %v and %v to have the same date part", t1, t2) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestWithTimePart(t *testing.T) { | ||||||
|  | 	tz := TimezoneBerlin | ||||||
|  | 	base := time.Date(2022, 1, 1, 0, 0, 0, 0, tz) | ||||||
|  | 	expected := time.Date(2022, 1, 1, 13, 14, 15, 0, tz) | ||||||
|  | 	result := WithTimePart(base, 13, 14, 15) | ||||||
|  | 	if !result.Equal(expected) { | ||||||
|  | 		t.Errorf("Expected %v but got %v", expected, result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestCombineDateAndTime(t *testing.T) { | ||||||
|  | 	tz := TimezoneBerlin | ||||||
|  | 	d := time.Date(2022, 1, 1, 0, 0, 0, 0, tz) | ||||||
|  | 	tm := time.Date(0, 0, 0, 13, 14, 15, 0, tz) | ||||||
|  | 	expected := time.Date(2022, 1, 1, 13, 14, 15, 0, tz) | ||||||
|  | 	result := CombineDateAndTime(d, tm) | ||||||
|  | 	if !result.Equal(expected) { | ||||||
|  | 		t.Errorf("Expected %v but got %v", expected, result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestIsSunday(t *testing.T) { | ||||||
|  | 	tz := TimezoneBerlin | ||||||
|  | 	tm := time.Date(2022, 1, 2, 0, 0, 0, 0, tz) // 2nd January 2022 is a Sunday | ||||||
|  | 	if !IsSunday(tm, tz) { | ||||||
|  | 		t.Errorf("Expected %v to be a Sunday", tm) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestDurationFromTime(t *testing.T) { | ||||||
|  | 	expected := time.Duration(13*time.Hour + 14*time.Minute + 15*time.Second) | ||||||
|  | 	result := DurationFromTime(13, 14, 15) | ||||||
|  | 	if result != expected { | ||||||
|  | 		t.Errorf("Expected %v but got %v", expected, result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestMin(t *testing.T) { | ||||||
|  | 	t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC) | ||||||
|  | 	t2 := time.Date(2022, 1, 2, 0, 0, 0, 0, time.UTC) | ||||||
|  | 	expected := t1 | ||||||
|  | 	result := Min(t1, t2) | ||||||
|  | 	if !result.Equal(expected) { | ||||||
|  | 		t.Errorf("Expected %v but got %v", expected, result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestMax(t *testing.T) { | ||||||
|  | 	t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC) | ||||||
|  | 	t2 := time.Date(2022, 1, 2, 0, 0, 0, 0, time.UTC) | ||||||
|  | 	expected := t2 | ||||||
|  | 	result := Max(t1, t2) | ||||||
|  | 	if !result.Equal(expected) { | ||||||
|  | 		t.Errorf("Expected %v but got %v", expected, result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestUnixFloatSeconds(t *testing.T) { | ||||||
|  | 	v := 1640995200.0 // 1st January 2022 00:00:00 UTC in Unix timestamp | ||||||
|  | 	expected := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC) | ||||||
|  | 	result := UnixFloatSeconds(v) | ||||||
|  | 	if !result.Equal(expected) { | ||||||
|  | 		t.Errorf("Expected %v but got %v", expected, result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestFloorTime(t *testing.T) { | ||||||
|  | 	tm := time.Date(2022, 1, 1, 13, 14, 15, 0, time.UTC) | ||||||
|  | 	expected := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC) | ||||||
|  | 	result := FloorTime(tm) | ||||||
|  | 	if !result.Equal(expected) { | ||||||
|  | 		t.Errorf("Expected %v but got %v", expected, result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestSubtractYears(t *testing.T) { | ||||||
|  | 	tz := TimezoneBerlin | ||||||
|  | 	tm := time.Date(2022, 1, 1, 0, 0, 0, 0, tz) | ||||||
|  |  | ||||||
|  | 	expected := time.Date(2021, 1, 1, 0, 0, 0, 0, tz) | ||||||
|  | 	result := SubtractYears(tm, 1, tz) | ||||||
|  | 	if !result.Equal(expected) { | ||||||
|  | 		t.Errorf("Expected %v but got %v", expected, result) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	expected = time.Date(2020, 1, 1, 0, 0, 0, 0, tz) | ||||||
|  | 	result = SubtractYears(tm, 2, tz) | ||||||
|  | 	if !result.Equal(expected) { | ||||||
|  | 		t.Errorf("Expected %v but got %v", expected, result) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	expected = time.Date(2019, 1, 1, 0, 0, 0, 0, tz) | ||||||
|  | 	result = SubtractYears(tm, 3, tz) | ||||||
|  | 	if !result.Equal(expected) { | ||||||
|  | 		t.Errorf("Expected %v but got %v", expected, result) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	expected = time.Date(2025, 1, 1, 0, 0, 0, 0, tz) | ||||||
|  | 	result = SubtractYears(tm, -3, tz) | ||||||
|  | 	if !result.Equal(expected) { | ||||||
|  | 		t.Errorf("Expected %v but got %v", expected, result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestAddYears(t *testing.T) { | ||||||
|  | 	tz := TimezoneBerlin | ||||||
|  | 	tm := time.Date(2022, 1, 1, 0, 0, 0, 0, tz) | ||||||
|  | 	expected := time.Date(2023, 1, 1, 0, 0, 0, 0, tz) | ||||||
|  | 	result := AddYears(tm, 1, tz) | ||||||
|  | 	if !result.Equal(expected) { | ||||||
|  | 		t.Errorf("Expected %v but got %v", expected, result) | ||||||
|  | 	} | ||||||
|  | } | ||||||
| @@ -52,6 +52,7 @@ type Coll[TData any] struct { | |||||||
| 	customDecoder       *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface) | 	customDecoder       *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface) | ||||||
| 	isInterfaceDataType bool                                                     // true if TData is an interface (not a struct) | 	isInterfaceDataType bool                                                     // true if TData is an interface (not a struct) | ||||||
| 	unmarshalHooks      []func(d TData) TData                                    // called for every object after unmarshalling | 	unmarshalHooks      []func(d TData) TData                                    // called for every object after unmarshalling | ||||||
|  | 	marshalHooks        []func(d TData) TData                                    // called for every object before marshalling | ||||||
| 	extraModPipeline    []func(ctx context.Context) mongo.Pipeline               // appended to pipelines after filter/limit/skip/sort, used for $lookup, $set, $unset, $project, etc | 	extraModPipeline    []func(ctx context.Context) mongo.Pipeline               // appended to pipelines after filter/limit/skip/sort, used for $lookup, $set, $unset, $project, etc | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -83,18 +84,32 @@ func (c *Coll[TData]) WithDecodeFunc(cdf func(ctx context.Context, dec Decodable | |||||||
| 	return c | 	return c | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // WithUnmarshalHook | ||||||
|  | // function that is called for every object after reading from DB | ||||||
| func (c *Coll[TData]) WithUnmarshalHook(fn func(d TData) TData) *Coll[TData] { | func (c *Coll[TData]) WithUnmarshalHook(fn func(d TData) TData) *Coll[TData] { | ||||||
| 	c.unmarshalHooks = append(c.unmarshalHooks, fn) | 	c.unmarshalHooks = append(c.unmarshalHooks, fn) | ||||||
|  |  | ||||||
| 	return c | 	return c | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // WithMarshalHook | ||||||
|  | // function that is called for every object before writing to DB | ||||||
|  | func (c *Coll[TData]) WithMarshalHook(fn func(d TData) TData) *Coll[TData] { | ||||||
|  | 	c.marshalHooks = append(c.marshalHooks, fn) | ||||||
|  |  | ||||||
|  | 	return c | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // WithModifyingPipeline | ||||||
|  | // pipeline that is appended to all read operations (after filtering) | ||||||
| func (c *Coll[TData]) WithModifyingPipeline(p mongo.Pipeline) *Coll[TData] { | func (c *Coll[TData]) WithModifyingPipeline(p mongo.Pipeline) *Coll[TData] { | ||||||
| 	c.extraModPipeline = append(c.extraModPipeline, func(ctx context.Context) mongo.Pipeline { return p }) | 	c.extraModPipeline = append(c.extraModPipeline, func(ctx context.Context) mongo.Pipeline { return p }) | ||||||
|  |  | ||||||
| 	return c | 	return c | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // WithModifyingPipelineFunc | ||||||
|  | // pipeline that is appended to all read operations (after filtering) | ||||||
| func (c *Coll[TData]) WithModifyingPipelineFunc(fn func(ctx context.Context) mongo.Pipeline) *Coll[TData] { | func (c *Coll[TData]) WithModifyingPipelineFunc(fn func(ctx context.Context) mongo.Pipeline) *Coll[TData] { | ||||||
| 	c.extraModPipeline = append(c.extraModPipeline, fn) | 	c.extraModPipeline = append(c.extraModPipeline, fn) | ||||||
|  |  | ||||||
| @@ -125,3 +140,17 @@ func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirecti | |||||||
| 		Extra:          ct.Extra{}, | 		Extra:          ct.Extra{}, | ||||||
| 	}, nil | 	}, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) needsDoubleSort(ctx context.Context) bool { | ||||||
|  | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		for _, stage := range ppl(ctx) { | ||||||
|  | 			for _, bsone := range stage { | ||||||
|  | 				if bsone.Key == "$group" { | ||||||
|  | 					// a group stage in extraModPipeline results in unsorted data, which means the caller must sort again after these pipeline stages... | ||||||
|  | 					return true | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return false | ||||||
|  | } | ||||||
|   | |||||||
| @@ -36,6 +36,14 @@ func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options. | |||||||
| 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	if c.needsDoubleSort(ctx) { | ||||||
|  | 		for _, opt := range opts { | ||||||
|  | 			if opt != nil && opt.Sort != nil { | ||||||
|  | 				pipeline = append(pipeline, bson.D{{Key: "$sort", Value: opt.Sort}}) | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	for _, opt := range opts { | 	for _, opt := range opts { | ||||||
| 		if opt != nil && opt.Projection != nil { | 		if opt != nil && opt.Projection != nil { | ||||||
| 			pipeline = append(pipeline, bson.D{{Key: "$project", Value: opt.Projection}}) | 			pipeline = append(pipeline, bson.D{{Key: "$project", Value: opt.Projection}}) | ||||||
|   | |||||||
| @@ -9,6 +9,10 @@ import ( | |||||||
| ) | ) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, error) { | func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, error) { | ||||||
|  | 	for _, hook := range c.marshalHooks { | ||||||
|  | 		valueIn = hook(valueIn) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	insRes, err := c.coll.InsertOne(ctx, valueIn) | 	insRes, err := c.coll.InsertOne(ctx, valueIn) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
| @@ -36,6 +40,12 @@ func (c *Coll[TData]) InsertOneUnchecked(ctx context.Context, valueIn any) (TDat | |||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) { | func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) { | ||||||
|  | 	for _, hook := range c.marshalHooks { | ||||||
|  | 		for i := 0; i < len(valueIn); i++ { | ||||||
|  | 			valueIn[i] = hook(valueIn[i]) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	insRes, err := c.coll.InsertMany(ctx, langext.ArrayToInterface(valueIn)) | 	insRes, err := c.coll.InsertMany(ctx, langext.ArrayToInterface(valueIn)) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, exerr.Wrap(err, "mongo-query[insert-many] failed").Int("len(valueIn)", len(valueIn)).Str("collection", c.Name()).Build() | 		return nil, exerr.Wrap(err, "mongo-query[insert-many] failed").Int("len(valueIn)", len(valueIn)).Str("collection", c.Name()).Build() | ||||||
|   | |||||||
| @@ -14,6 +14,10 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
| 		return make([]TData, 0), ct.End(), nil | 		return make([]TData, 0), ct.End(), nil | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	if pageSize != nil && *pageSize == 0 { | ||||||
|  | 		return make([]TData, 0), inTok, nil // fast track, we return an empty list and do not advance the cursor token | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	pipeline := mongo.Pipeline{} | 	pipeline := mongo.Pipeline{} | ||||||
| 	pf1 := "_id" | 	pf1 := "_id" | ||||||
| 	pd1 := ct.SortASC | 	pd1 := ct.SortASC | ||||||
| @@ -35,7 +39,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
| 		sortDirSecondary = nil | 		sortDirSecondary = nil | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	paginationPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | 	paginationPipeline, doubleSortPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, ct.CursorToken{}, exerr. | 		return nil, ct.CursorToken{}, exerr. | ||||||
| 			Wrap(err, "failed to create pagination"). | 			Wrap(err, "failed to create pagination"). | ||||||
| @@ -56,6 +60,10 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
| 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	if c.needsDoubleSort(ctx) { | ||||||
|  | 		pipeline = langext.ArrConcat(pipeline, doubleSortPipeline) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline) | 	cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, ct.CursorToken{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | 		return nil, ct.CursorToken{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||||
| @@ -136,14 +144,50 @@ func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageS | |||||||
| 	return data, token, count, nil | 	return data, token, count, nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) { | func (c *Coll[TData]) ListAllIDs(ctx context.Context, filter ct.RawFilter) ([]string, error) { | ||||||
|  | 	type idObject struct { | ||||||
|  | 		ID string `bson:"_id"` | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelineFilter := mongo.Pipeline{} | ||||||
|  |  | ||||||
|  | 	if filter != nil { | ||||||
|  | 		pipelineFilter = filter.FilterQuery() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	extrModPipelineResolved := mongo.Pipeline{} | ||||||
|  | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelineProjectIDs := mongo.Pipeline{} | ||||||
|  | 	pipelineProjectIDs = append(pipelineProjectIDs, bson.D{{Key: "$project", Value: bson.M{"_id": 1}}}) | ||||||
|  |  | ||||||
|  | 	pipelineList := langext.ArrConcat(pipelineFilter, extrModPipelineResolved, pipelineProjectIDs) | ||||||
|  |  | ||||||
|  | 	cursorList, err := c.coll.Aggregate(ctx, pipelineList) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	var res []idObject | ||||||
|  |  | ||||||
|  | 	err = cursorList.All(ctx, &res) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "failed to decode entities").Any("pipeline", pipelineList).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return langext.ArrMap(res, func(v idObject) string { return v.ID }), nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) { | ||||||
|  |  | ||||||
| 	cond := bson.A{} | 	cond := bson.A{} | ||||||
| 	sort := bson.D{} | 	sort := bson.D{} | ||||||
|  |  | ||||||
| 	valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary) | 	valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build() | 		return nil, nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if sortPrimary == ct.SortASC { | 	if sortPrimary == ct.SortASC { | ||||||
| @@ -160,7 +204,7 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken | |||||||
|  |  | ||||||
| 		valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary) | 		valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build() | 			return nil, nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build() | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		if *sortSecondary == ct.SortASC { | 		if *sortSecondary == ct.SortASC { | ||||||
| @@ -171,7 +215,7 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken | |||||||
| 				bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}}, | 				bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}}, | ||||||
| 			}}) | 			}}) | ||||||
|  |  | ||||||
| 			sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) | 			sort = append(sort, bson.E{Key: *fieldSecondary, Value: +1}) | ||||||
|  |  | ||||||
| 		} else if *sortSecondary == ct.SortDESC { | 		} else if *sortSecondary == ct.SortDESC { | ||||||
|  |  | ||||||
| @@ -181,7 +225,7 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken | |||||||
| 				bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}}, | 				bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}}, | ||||||
| 			}}) | 			}}) | ||||||
|  |  | ||||||
| 			sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) | 			sort = append(sort, bson.E{Key: *fieldSecondary, Value: -1}) | ||||||
|  |  | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| @@ -203,15 +247,17 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken | |||||||
|  |  | ||||||
| 	} else { | 	} else { | ||||||
|  |  | ||||||
| 		return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build() | 		return nil, nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build() | ||||||
|  |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}}) | 	pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}}) | ||||||
|  |  | ||||||
|  | 	pipelineSort := mongo.Pipeline{bson.D{{Key: "$sort", Value: sort}}} | ||||||
|  |  | ||||||
| 	if pageSize != nil { | 	if pageSize != nil { | ||||||
| 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}}) | 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}}) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return pipeline, nil | 	return pipeline, pipelineSort, nil | ||||||
| } | } | ||||||
|   | |||||||
| @@ -61,6 +61,10 @@ func (c *Coll[TData]) UpdateMany(ctx context.Context, filterQuery bson.M, update | |||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) ReplaceOne(ctx context.Context, filterQuery bson.M, value TData) error { | func (c *Coll[TData]) ReplaceOne(ctx context.Context, filterQuery bson.M, value TData) error { | ||||||
|  | 	for _, hook := range c.marshalHooks { | ||||||
|  | 		value = hook(value) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	_, err := c.coll.UpdateOne(ctx, filterQuery, bson.M{"$set": value}) | 	_, err := c.coll.UpdateOne(ctx, filterQuery, bson.M{"$set": value}) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return exerr.Wrap(err, "mongo-query[replace-one] failed"). | 		return exerr.Wrap(err, "mongo-query[replace-one] failed"). | ||||||
| @@ -73,6 +77,10 @@ func (c *Coll[TData]) ReplaceOne(ctx context.Context, filterQuery bson.M, value | |||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) FindOneAndReplace(ctx context.Context, filterQuery bson.M, value TData) (TData, error) { | func (c *Coll[TData]) FindOneAndReplace(ctx context.Context, filterQuery bson.M, value TData) (TData, error) { | ||||||
|  | 	for _, hook := range c.marshalHooks { | ||||||
|  | 		value = hook(value) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	mongoRes := c.coll.FindOneAndReplace(ctx, filterQuery, value, options.FindOneAndReplace().SetReturnDocument(options.After)) | 	mongoRes := c.coll.FindOneAndReplace(ctx, filterQuery, value, options.FindOneAndReplace().SetReturnDocument(options.After)) | ||||||
| 	if err := mongoRes.Err(); err != nil { | 	if err := mongoRes.Err(); err != nil { | ||||||
| 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one-and-update] failed"). | 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one-and-update] failed"). | ||||||
|   | |||||||
							
								
								
									
										9
									
								
								wpdf/utils.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										9
									
								
								wpdf/utils.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,9 @@ | |||||||
|  | package wpdf | ||||||
|  |  | ||||||
|  | func hexToColor(c uint32) PDFColor { | ||||||
|  | 	return PDFColor{R: int((c >> 16) & 0xFF), G: int((c >> 8) & 0xFF), B: int((c >> 0) & 0xFF)} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func rgbToColor(r, g, b int) PDFColor { | ||||||
|  | 	return PDFColor{R: r, G: g, B: b} | ||||||
|  | } | ||||||
							
								
								
									
										197
									
								
								wpdf/wpdf.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										197
									
								
								wpdf/wpdf.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,197 @@ | |||||||
|  | package wpdf | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	"github.com/jung-kurt/gofpdf" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type WPDFBuilder struct { | ||||||
|  | 	b           *gofpdf.Fpdf | ||||||
|  | 	tr          func(string) string | ||||||
|  | 	cellHeight  float64 | ||||||
|  | 	cellSpacing float64 | ||||||
|  | 	fontName    PDFFontFamily | ||||||
|  | 	fontStyle   PDFFontStyle | ||||||
|  | 	fontSize    float64 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type PDFMargins struct { | ||||||
|  | 	Left  float64 | ||||||
|  | 	Top   float64 | ||||||
|  | 	Right float64 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type PDFColor struct { | ||||||
|  | 	R int | ||||||
|  | 	G int | ||||||
|  | 	B int | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPDFBuilder(orientation PDFOrientation, size PDFSize, unicode bool) *WPDFBuilder { | ||||||
|  |  | ||||||
|  | 	fpdfbuilder := gofpdf.New(string(orientation), "mm", string(size), "") | ||||||
|  |  | ||||||
|  | 	var tr func(string) string | ||||||
|  | 	if unicode { | ||||||
|  | 		tr = fpdfbuilder.UnicodeTranslatorFromDescriptor("") | ||||||
|  | 	} else { | ||||||
|  | 		tr = func(s string) string { return s } | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	b := &WPDFBuilder{ | ||||||
|  | 		b:           fpdfbuilder, | ||||||
|  | 		tr:          tr, | ||||||
|  | 		cellHeight:  5, | ||||||
|  | 		cellSpacing: 1, | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	b.SetMargins(PDFMargins{Left: 15, Top: 25, Right: 15}) // default values | ||||||
|  | 	b.SetFont(FontHelvetica, Normal, 12)                   // ensures font is set | ||||||
|  |  | ||||||
|  | 	return b | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) FPDF() *gofpdf.Fpdf { | ||||||
|  | 	return b.b | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) SetMargins(v PDFMargins) { | ||||||
|  | 	b.b.SetMargins(v.Left, v.Top, v.Right) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) AddPage() { | ||||||
|  | 	b.b.AddPage() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) SetTextColor(cr, cg, cb int) { | ||||||
|  | 	b.b.SetTextColor(cr, cg, cb) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetTextColor() (cr, cg, cb int) { | ||||||
|  | 	return b.b.GetTextColor() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) SetDrawColor(cr, cg, cb int) { | ||||||
|  | 	b.b.SetDrawColor(cr, cg, cb) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetDrawColor() (cr, cg, cb int) { | ||||||
|  | 	return b.b.GetDrawColor() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) SetFillColor(cr, cg, cb int) { | ||||||
|  | 	b.b.SetFillColor(cr, cg, cb) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetFillColor() (cr, cg, cb int) { | ||||||
|  | 	return b.b.GetFillColor() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) SetLineWidth(w float64) { | ||||||
|  | 	b.b.SetLineWidth(w) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetLineWidth() float64 { | ||||||
|  | 	return b.b.GetLineWidth() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) SetFont(fontName PDFFontFamily, fontStyle PDFFontStyle, fontSize float64) { | ||||||
|  | 	b.b.SetFont(string(fontName), string(fontStyle), fontSize) | ||||||
|  |  | ||||||
|  | 	b.fontName = fontName | ||||||
|  | 	b.fontStyle = fontStyle | ||||||
|  | 	b.fontSize = fontSize | ||||||
|  |  | ||||||
|  | 	b.cellHeight = b.b.PointConvert(fontSize) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) SetCellSpacing(h float64) { | ||||||
|  | 	b.cellSpacing = h | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) Ln(h float64) { | ||||||
|  | 	b.b.Ln(h) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) Build() ([]byte, error) { | ||||||
|  | 	buf := new(bytes.Buffer) | ||||||
|  | 	err := b.b.Output(buf) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, err | ||||||
|  | 	} | ||||||
|  | 	return buf.Bytes(), nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) SetX(x float64) { | ||||||
|  | 	b.b.SetX(x) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) IncX(dx float64) { | ||||||
|  | 	b.b.SetX(b.b.GetX() + dx) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetX() float64 { | ||||||
|  | 	return b.b.GetX() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) SetY(y float64) { | ||||||
|  | 	b.b.SetY(y) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetY() float64 { | ||||||
|  | 	return b.b.GetY() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) SetXY(x float64, y float64) { | ||||||
|  | 	b.b.SetXY(x, y) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetXY() (x float64, y float64) { | ||||||
|  | 	return b.b.GetXY() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetMargins() (left, top, right, bottom float64) { | ||||||
|  | 	return b.b.GetMargins() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetMarginLeft() float64 { | ||||||
|  | 	v, _, _, _ := b.b.GetMargins() | ||||||
|  | 	return v | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetMarginTop() float64 { | ||||||
|  | 	_, v, _, _ := b.b.GetMargins() | ||||||
|  | 	return v | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetMarginRight() float64 { | ||||||
|  | 	_, _, v, _ := b.b.GetMargins() | ||||||
|  | 	return v | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetMarginBottom() float64 { | ||||||
|  | 	_, _, _, v := b.b.GetMargins() | ||||||
|  | 	return v | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetPageSize() (width, height float64) { | ||||||
|  | 	return b.b.GetPageSize() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetPageWidth() float64 { | ||||||
|  | 	v, _ := b.b.GetPageSize() | ||||||
|  | 	return v | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetPageHeight() float64 { | ||||||
|  | 	_, v := b.b.GetPageSize() | ||||||
|  | 	return v | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetWorkAreaWidth() float64 { | ||||||
|  | 	return b.GetPageWidth() - b.GetMarginLeft() - b.GetMarginRight() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) GetStringWidth(str string) float64 { | ||||||
|  | 	return b.b.GetStringWidth(str) | ||||||
|  | } | ||||||
							
								
								
									
										238
									
								
								wpdf/wpdfCell.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										238
									
								
								wpdf/wpdfCell.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,238 @@ | |||||||
|  | package wpdf | ||||||
|  |  | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  |  | ||||||
|  | type PDFCellOpt struct { | ||||||
|  | 	width             *float64 | ||||||
|  | 	height            *float64 | ||||||
|  | 	border            *PDFBorder | ||||||
|  | 	ln                *PDFTextBreak | ||||||
|  | 	align             *PDFTextAlign | ||||||
|  | 	fill              *bool | ||||||
|  | 	link              *int | ||||||
|  | 	linkStr           *string | ||||||
|  | 	fontNameOverride  *PDFFontFamily | ||||||
|  | 	fontStyleOverride *PDFFontStyle | ||||||
|  | 	fontSizeOverride  *float64 | ||||||
|  | 	extraLn           *float64 | ||||||
|  | 	x                 *float64 | ||||||
|  | 	autoWidth         *bool | ||||||
|  | 	textColor         *PDFColor | ||||||
|  | 	borderColor       *PDFColor | ||||||
|  | 	fillColor         *PDFColor | ||||||
|  | 	autoWidthPaddingX *float64 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewPDFCellOpt() *PDFCellOpt { | ||||||
|  | 	return &PDFCellOpt{} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) Width(v float64) *PDFCellOpt { | ||||||
|  | 	opt.width = &v | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) Height(v float64) *PDFCellOpt { | ||||||
|  | 	opt.height = &v | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) Border(v PDFBorder) *PDFCellOpt { | ||||||
|  | 	opt.border = &v | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) LnPos(v PDFTextBreak) *PDFCellOpt { | ||||||
|  | 	opt.ln = &v | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) Align(v PDFTextAlign) *PDFCellOpt { | ||||||
|  | 	opt.align = &v | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) FillBackground(v bool) *PDFCellOpt { | ||||||
|  | 	opt.fill = &v | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) Link(v int) *PDFCellOpt { | ||||||
|  | 	opt.link = &v | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) LinkStr(v string) *PDFCellOpt { | ||||||
|  | 	opt.linkStr = &v | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) Font(fontName PDFFontFamily, fontStyle PDFFontStyle, fontSize float64) *PDFCellOpt { | ||||||
|  | 	opt.fontNameOverride = &fontName | ||||||
|  | 	opt.fontStyleOverride = &fontStyle | ||||||
|  | 	opt.fontSizeOverride = &fontSize | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) FontName(v PDFFontFamily) *PDFCellOpt { | ||||||
|  | 	opt.fontNameOverride = &v | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) FontStyle(v PDFFontStyle) *PDFCellOpt { | ||||||
|  | 	opt.fontStyleOverride = &v | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) FontSize(v float64) *PDFCellOpt { | ||||||
|  | 	opt.fontSizeOverride = &v | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) Bold() *PDFCellOpt { | ||||||
|  | 	opt.fontStyleOverride = langext.Ptr(Bold) | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) Italic() *PDFCellOpt { | ||||||
|  | 	opt.fontStyleOverride = langext.Ptr(Italic) | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) LnAfter(v float64) *PDFCellOpt { | ||||||
|  | 	opt.extraLn = &v | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) X(v float64) *PDFCellOpt { | ||||||
|  | 	opt.x = &v | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) AutoWidth() *PDFCellOpt { | ||||||
|  | 	opt.autoWidth = langext.PTrue | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) AutoWidthPaddingX(v float64) *PDFCellOpt { | ||||||
|  | 	opt.autoWidthPaddingX = &v | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) TextColor(cr, cg, cb int) *PDFCellOpt { | ||||||
|  | 	opt.textColor = langext.Ptr(rgbToColor(cr, cg, cb)) | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) TextColorHex(c uint32) *PDFCellOpt { | ||||||
|  | 	opt.textColor = langext.Ptr(hexToColor(c)) | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) BorderColor(cr, cg, cb int) *PDFCellOpt { | ||||||
|  | 	opt.borderColor = langext.Ptr(rgbToColor(cr, cg, cb)) | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) BorderColorHex(c uint32) *PDFCellOpt { | ||||||
|  | 	opt.borderColor = langext.Ptr(hexToColor(c)) | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) FillColor(cr, cg, cb int) *PDFCellOpt { | ||||||
|  | 	opt.fillColor = langext.Ptr(rgbToColor(cr, cg, cb)) | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (opt *PDFCellOpt) FillColorHex(c uint32) *PDFCellOpt { | ||||||
|  | 	opt.fillColor = langext.Ptr(hexToColor(c)) | ||||||
|  | 	return opt | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (b *WPDFBuilder) Cell(txt string, opts ...*PDFCellOpt) { | ||||||
|  |  | ||||||
|  | 	txtTR := b.tr(txt) | ||||||
|  |  | ||||||
|  | 	width := float64(0) | ||||||
|  | 	height := b.cellHeight + b.cellSpacing | ||||||
|  | 	border := BorderNone | ||||||
|  | 	ln := BreakToNextLine | ||||||
|  | 	align := AlignLeft | ||||||
|  | 	fill := false | ||||||
|  | 	link := 0 | ||||||
|  | 	linkStr := "" | ||||||
|  | 	var fontNameOverride *PDFFontFamily | ||||||
|  | 	var fontStyleOverride *PDFFontStyle | ||||||
|  | 	var fontSizeOverride *float64 | ||||||
|  | 	extraLn := float64(0) | ||||||
|  | 	var x *float64 | ||||||
|  | 	autoWidth := false | ||||||
|  | 	var textColor *PDFColor | ||||||
|  | 	var borderColor *PDFColor | ||||||
|  | 	var fillColor *PDFColor | ||||||
|  | 	autoWidthPaddingX := float64(0) | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		width = langext.Coalesce(opt.width, width) | ||||||
|  | 		height = langext.Coalesce(opt.height, height) | ||||||
|  | 		border = langext.Coalesce(opt.border, border) | ||||||
|  | 		ln = langext.Coalesce(opt.ln, ln) | ||||||
|  | 		align = langext.Coalesce(opt.align, align) | ||||||
|  | 		fill = langext.Coalesce(opt.fill, fill) | ||||||
|  | 		link = langext.Coalesce(opt.link, link) | ||||||
|  | 		linkStr = langext.Coalesce(opt.linkStr, linkStr) | ||||||
|  | 		fontNameOverride = langext.CoalesceOpt(opt.fontNameOverride, fontNameOverride) | ||||||
|  | 		fontStyleOverride = langext.CoalesceOpt(opt.fontStyleOverride, fontStyleOverride) | ||||||
|  | 		fontSizeOverride = langext.CoalesceOpt(opt.fontSizeOverride, fontSizeOverride) | ||||||
|  | 		extraLn = langext.Coalesce(opt.extraLn, extraLn) | ||||||
|  | 		x = langext.CoalesceOpt(opt.x, x) | ||||||
|  | 		autoWidth = langext.Coalesce(opt.autoWidth, autoWidth) | ||||||
|  | 		textColor = langext.CoalesceOpt(opt.textColor, textColor) | ||||||
|  | 		borderColor = langext.CoalesceOpt(opt.borderColor, borderColor) | ||||||
|  | 		fillColor = langext.CoalesceOpt(opt.fillColor, fillColor) | ||||||
|  | 		autoWidthPaddingX = langext.Coalesce(opt.autoWidthPaddingX, autoWidthPaddingX) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if fontNameOverride != nil || fontStyleOverride != nil || fontSizeOverride != nil { | ||||||
|  | 		oldFontName := b.fontName | ||||||
|  | 		oldFontStyle := b.fontStyle | ||||||
|  | 		oldFontSize := b.fontSize | ||||||
|  | 		newFontName := langext.Coalesce(fontNameOverride, oldFontName) | ||||||
|  | 		newFontStyle := langext.Coalesce(fontStyleOverride, oldFontStyle) | ||||||
|  | 		newFontSize := langext.Coalesce(fontSizeOverride, oldFontSize) | ||||||
|  | 		b.SetFont(newFontName, newFontStyle, newFontSize) | ||||||
|  | 		defer func() { b.SetFont(oldFontName, oldFontStyle, oldFontSize) }() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if textColor != nil { | ||||||
|  | 		oldColorR, oldColorG, oldColorB := b.b.GetTextColor() | ||||||
|  | 		b.SetTextColor(textColor.R, textColor.G, textColor.B) | ||||||
|  | 		defer func() { b.SetTextColor(oldColorR, oldColorG, oldColorB) }() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if borderColor != nil { | ||||||
|  | 		oldColorR, oldColorG, oldColorB := b.b.GetDrawColor() | ||||||
|  | 		b.SetDrawColor(borderColor.R, borderColor.G, borderColor.B) | ||||||
|  | 		defer func() { b.SetDrawColor(oldColorR, oldColorG, oldColorB) }() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if fillColor != nil { | ||||||
|  | 		oldColorR, oldColorG, oldColorB := b.b.GetFillColor() | ||||||
|  | 		b.SetFillColor(fillColor.R, fillColor.G, fillColor.B) | ||||||
|  | 		defer func() { b.SetFillColor(oldColorR, oldColorG, oldColorB) }() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if x != nil { | ||||||
|  | 		b.b.SetX(*x) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if autoWidth { | ||||||
|  | 		width = b.b.GetStringWidth(txtTR) + autoWidthPaddingX | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	b.b.CellFormat(width, height, txtTR, string(border), int(ln), string(align), fill, link, linkStr) | ||||||
|  |  | ||||||
|  | 	if extraLn != 0 { | ||||||
|  | 		b.b.Ln(extraLn) | ||||||
|  | 	} | ||||||
|  | } | ||||||
							
								
								
									
										80
									
								
								wpdf/wpdfConstants.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										80
									
								
								wpdf/wpdfConstants.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,80 @@ | |||||||
|  | package wpdf | ||||||
|  |  | ||||||
// PDFOrientation selects the page orientation string passed to gofpdf.
type PDFOrientation string

const (
	Portrait  PDFOrientation = "P"
	Landscape PDFOrientation = "L"
)

// PDFSize selects a named paper size string passed to gofpdf.
type PDFSize string

const (
	SizeA3      PDFSize = "A3"
	SizeA4      PDFSize = "A4"
	SizeA5      PDFSize = "A5" // fixed: was erroneously "A4" (copy-paste defect)
	SizeLetter  PDFSize = "Letter"
	SizeLegal   PDFSize = "Legal"
	SizeTabloid PDFSize = "Tabloid"
)

// PDFFontFamily names one of the fonts built into gofpdf.
type PDFFontFamily string

const (
	FontCourier      PDFFontFamily = "courier"
	FontHelvetica    PDFFontFamily = "helvetica"
	FontTimes        PDFFontFamily = "times"
	FontZapfDingbats PDFFontFamily = "zapfdingbats"
	FontSymbol       PDFFontFamily = "symbol"
)

// PDFFontStyle is the gofpdf font style flag combination ("B", "I", ...).
type PDFFontStyle string

const (
	Normal     PDFFontStyle = ""
	Bold       PDFFontStyle = "B"
	Italic     PDFFontStyle = "I"
	BoldItalic PDFFontStyle = "IB"
)

// PDFBorder is the gofpdf cell border specification (combination of L/T/R/B, or "1" for all).
type PDFBorder string

const (
	BorderNone   PDFBorder = ""
	BorderFull   PDFBorder = "1"
	BorderLeft   PDFBorder = "L"
	BorderTop    PDFBorder = "T"
	BorderRight  PDFBorder = "R"
	BorderBottom PDFBorder = "B"
	BorderTLR    PDFBorder = "TLR"
	BorderLR     PDFBorder = "LR"
)

// PDFTextBreak is the gofpdf "ln" parameter: where the cursor moves after a cell.
type PDFTextBreak int

const (
	BreakToRight    PDFTextBreak = 0
	BreakToNextLine PDFTextBreak = 1
	BreakToBelow    PDFTextBreak = 2
)

// PDFTextAlign is the gofpdf horizontal alignment flag for cell text.
type PDFTextAlign string

const (
	AlignLeft       PDFTextAlign = "L"
	AlignHorzCenter PDFTextAlign = "C"
	AlignRight      PDFTextAlign = "R"
)

// PDFRectStyle is the gofpdf rectangle draw style (fill, outline, or both).
type PDFRectStyle string

const (
	RectFill        PDFRectStyle = "F"
	RectOutline     PDFRectStyle = "D"
	RectFillOutline PDFRectStyle = "FD"
)

// Readability aliases for the boolean "fill" parameter of cell/rect calls.
const (
	BackgroundFill        = true
	BackgroundTransparent = false
)
							
								
								
									
										315
									
								
								wpdf/wpdfImage.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										315
									
								
								wpdf/wpdfImage.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,315 @@ | |||||||
|  | package wpdf | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	"github.com/jung-kurt/gofpdf" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/imageext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"image" | ||||||
|  | 	"image/color" | ||||||
|  | 	"net/http" | ||||||
|  | ) | ||||||
|  |  | ||||||
// PDFImageRef is the handle returned by RegisterImage. It identifies an image
// that has been registered with the underlying gofpdf document and carries the
// raw data so the image can be re-processed (cropped / fitted) later.
type PDFImageRef struct {
	Info  *gofpdf.ImageInfoType // metadata reported by gofpdf at registration time
	Name  string                // registration name used to reference the image in gofpdf
	Bin   []byte                // original raw image bytes
	Image *image.Image          // optional pre-decoded image; nil until decoded elsewhere
	Mime  string                // detected or assumed mime type of Bin
}

// PDFImageRegisterOpt holds optional parameters for RegisterImage.
// All fields are pointers; nil means "use the default".
type PDFImageRegisterOpt struct {
	imageType             *string // explicit gofpdf image type ("JPG", "JPEG", "PNG", "GIF"); sniffed from data if unset
	readDpi               *bool   // passed through to gofpdf.ImageOptions.ReadDpi
	allowNegativePosition *bool   // passed through to gofpdf.ImageOptions.AllowNegativePosition
	name                  *string // explicit registration name; a random one is generated if unset
}

// NewPDFImageRegisterOpt returns an empty option set (all defaults).
func NewPDFImageRegisterOpt() *PDFImageRegisterOpt {
	return &PDFImageRegisterOpt{}
}

// ImageType sets an explicit gofpdf image type, skipping content sniffing.
func (opt *PDFImageRegisterOpt) ImageType(v string) *PDFImageRegisterOpt {
	opt.imageType = &v
	return opt
}

// ReadDpi sets whether gofpdf should read the DPI from the image data.
func (opt *PDFImageRegisterOpt) ReadDpi(v bool) *PDFImageRegisterOpt {
	opt.readDpi = &v
	return opt
}

// AllowNegativePosition sets whether gofpdf may place the image at negative coordinates.
func (opt *PDFImageRegisterOpt) AllowNegativePosition(v bool) *PDFImageRegisterOpt {
	opt.allowNegativePosition = &v
	return opt
}

// Name sets an explicit registration name instead of the generated random one.
func (opt *PDFImageRegisterOpt) Name(v string) *PDFImageRegisterOpt {
	opt.name = &v
	return opt
}
|  |  | ||||||
|  | func (b *WPDFBuilder) RegisterImage(bin []byte, opts ...*PDFImageRegisterOpt) *PDFImageRef { | ||||||
|  | 	imgName := "fpdf_img_" + langext.MustRawHexUUID() | ||||||
|  | 	imageType := "" | ||||||
|  | 	readDpi := false | ||||||
|  | 	allowNegativePosition := false | ||||||
|  | 	mime := "application/octet-stream" | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		imageType = langext.Coalesce(opt.imageType, imageType) | ||||||
|  | 		readDpi = langext.Coalesce(opt.readDpi, readDpi) | ||||||
|  | 		allowNegativePosition = langext.Coalesce(opt.allowNegativePosition, allowNegativePosition) | ||||||
|  | 		imgName = langext.Coalesce(opt.name, imgName) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if imageType == "" { | ||||||
|  | 		ct := http.DetectContentType(bin[:512]) | ||||||
|  | 		switch ct { | ||||||
|  | 		case "image/jpg": | ||||||
|  | 			imageType = "JPG" | ||||||
|  | 			mime = ct | ||||||
|  | 		case "image/jpeg": | ||||||
|  | 			imageType = "JPEG" | ||||||
|  | 			mime = ct | ||||||
|  | 		case "image/png": | ||||||
|  | 			imageType = "PNG" | ||||||
|  | 			mime = ct | ||||||
|  | 		case "image/gif": | ||||||
|  | 			imageType = "GIF" | ||||||
|  | 			mime = ct | ||||||
|  | 		} | ||||||
|  | 	} else { | ||||||
|  | 		switch imageType { | ||||||
|  | 		case "JPG": | ||||||
|  | 		case "JPEG": | ||||||
|  | 			mime = "image/jpeg" | ||||||
|  | 		case "PNG": | ||||||
|  | 			mime = "image/png" | ||||||
|  | 		case "GIF": | ||||||
|  | 			mime = "image/gif" | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	options := gofpdf.ImageOptions{ | ||||||
|  | 		ImageType:             imageType, | ||||||
|  | 		ReadDpi:               readDpi, | ||||||
|  | 		AllowNegativePosition: allowNegativePosition, | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	info := b.b.RegisterImageOptionsReader(imgName, options, bytes.NewReader(bin)) | ||||||
|  |  | ||||||
|  | 	return &PDFImageRef{ | ||||||
|  | 		Name:  imgName, | ||||||
|  | 		Info:  info, | ||||||
|  | 		Bin:   bin, | ||||||
|  | 		Image: nil, | ||||||
|  | 		Mime:  mime, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
// PDFImageOpt holds optional parameters for (*WPDFBuilder).Image.
// All fields are pointers; nil means "use the default".
type PDFImageOpt struct {
	x                     *float64                  // explicit x position; current cursor x if unset
	y                     *float64                  // explicit y position; current cursor y if unset
	width                 *float64                  // render width; intrinsic image width if unset
	height                *float64                  // render height; intrinsic image height if unset
	flow                  *bool                     // passed through to gofpdf (move cursor after image)
	link                  *int                      // internal gofpdf link id
	linkStr               *string                   // external link URL
	imageType             *string                   // explicit gofpdf image type
	readDpi               *bool                     // passed through to gofpdf.ImageOptions.ReadDpi
	allowNegativePosition *bool                     // passed through to gofpdf.ImageOptions.AllowNegativePosition
	imageFit              *imageext.ImageFit        // object-fit mode; triggers re-encode when set
	fillColor             *color.Color              // letterbox fill color used by object-fit
	compression           *imageext.ImageCompresson // re-encode compression mode
	reEncodePixelPerMM    *float64                  // pixel density used when rasterizing for object-fit
	crop                  *imageext.ImageCrop       // relative crop rectangle; triggers re-encode when set
}

// NewPDFImageOpt returns an empty option set (all defaults).
func NewPDFImageOpt() *PDFImageOpt {
	return &PDFImageOpt{}
}

// X sets an explicit x position for the image.
func (opt *PDFImageOpt) X(v float64) *PDFImageOpt {
	opt.x = &v
	return opt
}

// Y sets an explicit y position for the image.
func (opt *PDFImageOpt) Y(v float64) *PDFImageOpt {
	opt.y = &v
	return opt
}

// Width sets the render width of the image.
func (opt *PDFImageOpt) Width(v float64) *PDFImageOpt {
	opt.width = &v
	return opt
}

// Height sets the render height of the image.
func (opt *PDFImageOpt) Height(v float64) *PDFImageOpt {
	opt.height = &v
	return opt
}

// Flow sets whether the cursor flows after the image (gofpdf semantics).
func (opt *PDFImageOpt) Flow(v bool) *PDFImageOpt {
	opt.flow = &v
	return opt
}

// Link sets an internal gofpdf link id for the image.
func (opt *PDFImageOpt) Link(v int) *PDFImageOpt {
	opt.link = &v
	return opt
}

// LinkStr sets an external link URL for the image.
func (opt *PDFImageOpt) LinkStr(v string) *PDFImageOpt {
	opt.linkStr = &v
	return opt
}

// ImageType sets an explicit gofpdf image type.
func (opt *PDFImageOpt) ImageType(v string) *PDFImageOpt {
	opt.imageType = &v
	return opt
}

// ReadDpi sets whether gofpdf should read the DPI from the image data.
func (opt *PDFImageOpt) ReadDpi(v bool) *PDFImageOpt {
	opt.readDpi = &v
	return opt
}

// AllowNegativePosition sets whether gofpdf may place the image at negative coordinates.
func (opt *PDFImageOpt) AllowNegativePosition(v bool) *PDFImageOpt {
	opt.allowNegativePosition = &v
	return opt
}

// ImageFit sets the object-fit mode (forces a decode/re-encode of the image).
func (opt *PDFImageOpt) ImageFit(v imageext.ImageFit) *PDFImageOpt {
	opt.imageFit = &v
	return opt

}

// FillColor sets the letterbox fill color used together with ImageFit.
func (opt *PDFImageOpt) FillColor(v color.Color) *PDFImageOpt {
	opt.fillColor = &v
	return opt
}

// Compression sets the compression used when the image is re-encoded.
func (opt *PDFImageOpt) Compression(v imageext.ImageCompresson) *PDFImageOpt {
	opt.compression = &v
	return opt
}

// ReEncodePixelPerMM sets the pixel density used when rasterizing for object-fit.
func (opt *PDFImageOpt) ReEncodePixelPerMM(v float64) *PDFImageOpt {
	opt.reEncodePixelPerMM = &v
	return opt
}

// Crop sets a crop rectangle (forces a decode/re-encode of the image).
func (opt *PDFImageOpt) Crop(cropX float64, cropY float64, cropWidth float64, cropHeight float64) *PDFImageOpt {
	opt.crop = &imageext.ImageCrop{
		CropX:      cropX,
		CropY:      cropY,
		CropWidth:  cropWidth,
		CropHeight: cropHeight,
	}
	return opt
}
|  |  | ||||||
|  | func (b *WPDFBuilder) Image(img *PDFImageRef, opts ...*PDFImageOpt) { | ||||||
|  | 	var err error | ||||||
|  |  | ||||||
|  | 	x := b.GetX() | ||||||
|  | 	y := b.GetY() | ||||||
|  | 	w := img.Info.Width() | ||||||
|  | 	h := img.Info.Height() | ||||||
|  | 	flow := true | ||||||
|  | 	link := 0 | ||||||
|  | 	linkStr := "" | ||||||
|  | 	imageType := "" | ||||||
|  | 	readDpi := false | ||||||
|  | 	allowNegativePosition := false | ||||||
|  | 	reEncodePixelPerMM := 15.0 | ||||||
|  | 	var imageFit *imageext.ImageFit = nil | ||||||
|  | 	var fillColor color.Color = color.Transparent | ||||||
|  | 	compression := imageext.CompressionPNGSpeed | ||||||
|  | 	var crop *imageext.ImageCrop = nil | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		x = langext.Coalesce(opt.x, x) | ||||||
|  | 		y = langext.Coalesce(opt.y, y) | ||||||
|  | 		w = langext.Coalesce(opt.width, w) | ||||||
|  | 		h = langext.Coalesce(opt.height, h) | ||||||
|  | 		flow = langext.Coalesce(opt.flow, flow) | ||||||
|  | 		link = langext.Coalesce(opt.link, link) | ||||||
|  | 		linkStr = langext.Coalesce(opt.linkStr, linkStr) | ||||||
|  | 		imageType = langext.Coalesce(opt.imageType, imageType) | ||||||
|  | 		readDpi = langext.Coalesce(opt.readDpi, readDpi) | ||||||
|  | 		allowNegativePosition = langext.Coalesce(opt.allowNegativePosition, allowNegativePosition) | ||||||
|  | 		imageFit = langext.CoalesceOpt(opt.imageFit, imageFit) | ||||||
|  | 		fillColor = langext.Coalesce(opt.fillColor, fillColor) | ||||||
|  | 		compression = langext.Coalesce(opt.compression, compression) | ||||||
|  | 		reEncodePixelPerMM = langext.Coalesce(opt.reEncodePixelPerMM, reEncodePixelPerMM) | ||||||
|  | 		crop = langext.CoalesceOpt(opt.crop, crop) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	regName := img.Name | ||||||
|  |  | ||||||
|  | 	if imageFit != nil || fillColor != nil || crop != nil { | ||||||
|  |  | ||||||
|  | 		var dataimg image.Image | ||||||
|  | 		if img.Image != nil { | ||||||
|  | 			dataimg = *img.Image | ||||||
|  | 		} else { | ||||||
|  | 			dataimg, err = imageext.VerifyAndDecodeImage(bytes.NewReader(img.Bin), img.Mime) | ||||||
|  | 			if err != nil { | ||||||
|  | 				b.b.SetError(err) | ||||||
|  | 				return | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if crop != nil { | ||||||
|  | 			dataimg, err = imageext.CropImage(dataimg, crop.CropX, crop.CropY, crop.CropWidth, crop.CropHeight) | ||||||
|  | 			if err != nil { | ||||||
|  | 				b.b.SetError(err) | ||||||
|  | 				return | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if imageFit != nil { | ||||||
|  | 			pdfPixelPerMillimeter := 15.0 | ||||||
|  |  | ||||||
|  | 			pxw := w * pdfPixelPerMillimeter | ||||||
|  | 			pxh := h * pdfPixelPerMillimeter | ||||||
|  |  | ||||||
|  | 			dataimg, err = imageext.ObjectFitImage(dataimg, pxw, pxh, *imageFit, fillColor) | ||||||
|  | 			if err != nil { | ||||||
|  | 				b.b.SetError(err) | ||||||
|  | 				return | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		bfr, imgMime, err := imageext.EncodeImage(dataimg, compression) | ||||||
|  | 		if err != nil { | ||||||
|  | 			b.b.SetError(err) | ||||||
|  | 			return | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		regName = regName + "_" + langext.MustRawHexUUID() | ||||||
|  |  | ||||||
|  | 		switch imgMime { | ||||||
|  | 		case "image/jpeg": | ||||||
|  | 			imageType = "JPEG" | ||||||
|  | 		case "image/png": | ||||||
|  | 			imageType = "PNG" | ||||||
|  | 		case "image/gif": | ||||||
|  | 			imageType = "GIF" | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		b.b.RegisterImageOptionsReader(regName, gofpdf.ImageOptions{ImageType: imageType}, &bfr) | ||||||
|  |  | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	fpdfOpt := gofpdf.ImageOptions{ | ||||||
|  | 		ImageType:             imageType, | ||||||
|  | 		ReadDpi:               readDpi, | ||||||
|  | 		AllowNegativePosition: allowNegativePosition, | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	b.b.ImageOptions(regName, x, y, w, h, flow, fpdfOpt, link, linkStr) | ||||||
|  | } | ||||||
							
								
								
									
										194
									
								
								wpdf/wpdfMultiCell.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										194
									
								
								wpdf/wpdfMultiCell.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,194 @@ | |||||||
|  | package wpdf | ||||||
|  |  | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  |  | ||||||
// PDFMultiCellOpt holds optional parameters for (*WPDFBuilder).MultiCell.
// All fields are pointers; nil means "use the default".
type PDFMultiCellOpt struct {
	width             *float64      // cell width; 0 (full width, gofpdf semantics) if unset
	height            *float64      // line height; builder default if unset
	border            *PDFBorder    // border specification
	align             *PDFTextAlign // horizontal text alignment
	fill              *bool         // paint the cell background
	fontNameOverride  *PDFFontFamily
	fontStyleOverride *PDFFontStyle
	fontSizeOverride  *float64
	extraLn           *float64 // extra line feed after the cell
	x                 *float64 // explicit x position before drawing
	textColor         *PDFColor
	borderColor       *PDFColor
	fillColor         *PDFColor
}

// NewPDFMultiCellOpt returns an empty option set (all defaults).
func NewPDFMultiCellOpt() *PDFMultiCellOpt {
	return &PDFMultiCellOpt{}
}

// Width sets the cell width.
func (opt *PDFMultiCellOpt) Width(v float64) *PDFMultiCellOpt {
	opt.width = &v
	return opt
}

// Height sets the line height.
func (opt *PDFMultiCellOpt) Height(v float64) *PDFMultiCellOpt {
	opt.height = &v
	return opt
}

// Border sets the cell border specification.
func (opt *PDFMultiCellOpt) Border(v PDFBorder) *PDFMultiCellOpt {
	opt.border = &v
	return opt
}

// Align sets the horizontal text alignment.
func (opt *PDFMultiCellOpt) Align(v PDFTextAlign) *PDFMultiCellOpt {
	opt.align = &v
	return opt
}

// FillBackground sets whether the cell background is painted.
func (opt *PDFMultiCellOpt) FillBackground(v bool) *PDFMultiCellOpt {
	opt.fill = &v
	return opt
}

// Font overrides family, style and size at once for this cell.
func (opt *PDFMultiCellOpt) Font(fontName PDFFontFamily, fontStyle PDFFontStyle, fontSize float64) *PDFMultiCellOpt {
	opt.fontNameOverride = &fontName
	opt.fontStyleOverride = &fontStyle
	opt.fontSizeOverride = &fontSize
	return opt
}

// FontName overrides the font family for this cell.
func (opt *PDFMultiCellOpt) FontName(v PDFFontFamily) *PDFMultiCellOpt {
	opt.fontNameOverride = &v
	return opt
}

// FontStyle overrides the font style for this cell.
func (opt *PDFMultiCellOpt) FontStyle(v PDFFontStyle) *PDFMultiCellOpt {
	opt.fontStyleOverride = &v
	return opt
}

// FontSize overrides the font size for this cell.
func (opt *PDFMultiCellOpt) FontSize(v float64) *PDFMultiCellOpt {
	opt.fontSizeOverride = &v
	return opt
}

// Bold is shorthand for FontStyle(Bold).
func (opt *PDFMultiCellOpt) Bold() *PDFMultiCellOpt {
	opt.fontStyleOverride = langext.Ptr(Bold)
	return opt
}

// Italic is shorthand for FontStyle(Italic).
func (opt *PDFMultiCellOpt) Italic() *PDFMultiCellOpt {
	opt.fontStyleOverride = langext.Ptr(Italic)
	return opt
}

// LnAfter adds an extra line feed after the cell.
func (opt *PDFMultiCellOpt) LnAfter(v float64) *PDFMultiCellOpt {
	opt.extraLn = &v
	return opt
}

// X sets an explicit x position before drawing.
func (opt *PDFMultiCellOpt) X(v float64) *PDFMultiCellOpt {
	opt.x = &v
	return opt
}

// TextColor sets the text color from RGB components.
func (opt *PDFMultiCellOpt) TextColor(cr, cg, cb int) *PDFMultiCellOpt {
	opt.textColor = langext.Ptr(rgbToColor(cr, cg, cb))
	return opt
}

// TextColorHex sets the text color from a 0xRRGGBB value.
func (opt *PDFMultiCellOpt) TextColorHex(c uint32) *PDFMultiCellOpt {
	opt.textColor = langext.Ptr(hexToColor(c))
	return opt
}

// BorderColor sets the border (draw) color from RGB components.
func (opt *PDFMultiCellOpt) BorderColor(cr, cg, cb int) *PDFMultiCellOpt {
	opt.borderColor = langext.Ptr(rgbToColor(cr, cg, cb))
	return opt
}

// BorderColorHex sets the border (draw) color from a 0xRRGGBB value.
func (opt *PDFMultiCellOpt) BorderColorHex(c uint32) *PDFMultiCellOpt {
	opt.borderColor = langext.Ptr(hexToColor(c))
	return opt
}

// FillColor sets the background fill color from RGB components.
func (opt *PDFMultiCellOpt) FillColor(cr, cg, cb int) *PDFMultiCellOpt {
	opt.fillColor = langext.Ptr(rgbToColor(cr, cg, cb))
	return opt
}

// FillColorHex sets the background fill color from a 0xRRGGBB value.
func (opt *PDFMultiCellOpt) FillColorHex(c uint32) *PDFMultiCellOpt {
	opt.fillColor = langext.Ptr(hexToColor(c))
	return opt
}
|  |  | ||||||
|  | func (b *WPDFBuilder) MultiCell(txt string, opts ...*PDFMultiCellOpt) { | ||||||
|  |  | ||||||
|  | 	txtTR := b.tr(txt) | ||||||
|  |  | ||||||
|  | 	width := float64(0) | ||||||
|  | 	height := b.cellHeight + b.cellSpacing | ||||||
|  | 	border := BorderNone | ||||||
|  | 	align := AlignLeft | ||||||
|  | 	fill := false | ||||||
|  | 	var fontNameOverride *PDFFontFamily | ||||||
|  | 	var fontStyleOverride *PDFFontStyle | ||||||
|  | 	var fontSizeOverride *float64 | ||||||
|  | 	extraLn := float64(0) | ||||||
|  | 	var x *float64 | ||||||
|  | 	var textColor *PDFColor | ||||||
|  | 	var borderColor *PDFColor | ||||||
|  | 	var fillColor *PDFColor | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		width = langext.Coalesce(opt.width, width) | ||||||
|  | 		height = langext.Coalesce(opt.height, height) | ||||||
|  | 		border = langext.Coalesce(opt.border, border) | ||||||
|  | 		align = langext.Coalesce(opt.align, align) | ||||||
|  | 		fill = langext.Coalesce(opt.fill, fill) | ||||||
|  | 		fontNameOverride = langext.CoalesceOpt(opt.fontNameOverride, fontNameOverride) | ||||||
|  | 		fontStyleOverride = langext.CoalesceOpt(opt.fontStyleOverride, fontStyleOverride) | ||||||
|  | 		fontSizeOverride = langext.CoalesceOpt(opt.fontSizeOverride, fontSizeOverride) | ||||||
|  | 		extraLn = langext.Coalesce(opt.extraLn, extraLn) | ||||||
|  | 		x = langext.CoalesceOpt(opt.x, x) | ||||||
|  | 		textColor = langext.CoalesceOpt(opt.textColor, textColor) | ||||||
|  | 		borderColor = langext.CoalesceOpt(opt.borderColor, borderColor) | ||||||
|  | 		fillColor = langext.CoalesceOpt(opt.fillColor, fillColor) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if fontNameOverride != nil || fontStyleOverride != nil || fontSizeOverride != nil { | ||||||
|  | 		oldFontName := b.fontName | ||||||
|  | 		oldFontStyle := b.fontStyle | ||||||
|  | 		oldFontSize := b.fontSize | ||||||
|  | 		newFontName := langext.Coalesce(fontNameOverride, oldFontName) | ||||||
|  | 		newFontStyle := langext.Coalesce(fontStyleOverride, oldFontStyle) | ||||||
|  | 		newFontSize := langext.Coalesce(fontSizeOverride, oldFontSize) | ||||||
|  | 		b.SetFont(newFontName, newFontStyle, newFontSize) | ||||||
|  | 		defer func() { b.SetFont(oldFontName, oldFontStyle, oldFontSize) }() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if textColor != nil { | ||||||
|  | 		oldColorR, oldColorG, oldColorB := b.b.GetTextColor() | ||||||
|  | 		b.SetTextColor(textColor.R, textColor.G, textColor.B) | ||||||
|  | 		defer func() { b.SetTextColor(oldColorR, oldColorG, oldColorB) }() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if borderColor != nil { | ||||||
|  | 		oldColorR, oldColorG, oldColorB := b.b.GetDrawColor() | ||||||
|  | 		b.SetDrawColor(borderColor.R, borderColor.G, borderColor.B) | ||||||
|  | 		defer func() { b.SetDrawColor(oldColorR, oldColorG, oldColorB) }() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if fillColor != nil { | ||||||
|  | 		oldColorR, oldColorG, oldColorB := b.b.GetFillColor() | ||||||
|  | 		b.SetFillColor(fillColor.R, fillColor.G, fillColor.B) | ||||||
|  | 		defer func() { b.SetFillColor(oldColorR, oldColorG, oldColorB) }() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if x != nil { | ||||||
|  | 		b.b.SetX(*x) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	b.b.MultiCell(width, height, txtTR, string(border), string(align), fill) | ||||||
|  |  | ||||||
|  | 	if extraLn != 0 { | ||||||
|  | 		b.b.Ln(extraLn) | ||||||
|  | 	} | ||||||
|  | } | ||||||
							
								
								
									
										126
									
								
								wpdf/wpdfRect.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										126
									
								
								wpdf/wpdfRect.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,126 @@ | |||||||
|  | package wpdf | ||||||
|  |  | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  |  | ||||||
// PDFRectOpt holds optional parameters for (*WPDFBuilder).Rect.
// All fields are pointers; nil means "use the default".
type PDFRectOpt struct {
	x         *float64  // explicit x position; current cursor x if unset
	y         *float64  // explicit y position; current cursor y if unset
	lineWidth *float64  // temporary outline stroke width
	drawColor *PDFColor // temporary outline color
	fillColor *PDFColor // temporary fill color
	radiusTL  *float64  // corner radii; 0 (square corner) if unset
	radiusTR  *float64
	radiusBR  *float64
	radiusBL  *float64
}

// NewPDFRectOpt returns an empty option set (all defaults).
func NewPDFRectOpt() *PDFRectOpt {
	return &PDFRectOpt{}
}

// X sets an explicit x position for the rectangle.
func (opt *PDFRectOpt) X(v float64) *PDFRectOpt {
	opt.x = &v
	return opt
}

// Y sets an explicit y position for the rectangle.
func (opt *PDFRectOpt) Y(v float64) *PDFRectOpt {
	opt.y = &v
	return opt
}

// LineWidth sets the outline stroke width used while drawing this rectangle.
func (opt *PDFRectOpt) LineWidth(v float64) *PDFRectOpt {
	opt.lineWidth = &v
	return opt
}

// DrawColor sets the outline color from RGB components.
func (opt *PDFRectOpt) DrawColor(cr, cg, cb int) *PDFRectOpt {
	opt.drawColor = langext.Ptr(rgbToColor(cr, cg, cb))
	return opt
}

// DrawColorHex sets the outline color from a 0xRRGGBB value.
func (opt *PDFRectOpt) DrawColorHex(c uint32) *PDFRectOpt {
	opt.drawColor = langext.Ptr(hexToColor(c))
	return opt
}

// FillColor sets the fill color from RGB components.
func (opt *PDFRectOpt) FillColor(cr, cg, cb int) *PDFRectOpt {
	opt.fillColor = langext.Ptr(rgbToColor(cr, cg, cb))
	return opt
}

// FillColorHex sets the fill color from a 0xRRGGBB value.
func (opt *PDFRectOpt) FillColorHex(c uint32) *PDFRectOpt {
	opt.fillColor = langext.Ptr(hexToColor(c))
	return opt
}

// Rounded sets the same radius on all four corners.
func (opt *PDFRectOpt) Rounded(radius float64) *PDFRectOpt {
	opt.radiusTL = &radius
	opt.radiusTR = &radius
	opt.radiusBR = &radius
	opt.radiusBL = &radius
	return opt
}

// RadiusTL sets the top-left corner radius.
func (opt *PDFRectOpt) RadiusTL(radius float64) *PDFRectOpt {
	opt.radiusTL = &radius
	return opt
}

// RadiusTR sets the top-right corner radius.
func (opt *PDFRectOpt) RadiusTR(radius float64) *PDFRectOpt {
	opt.radiusTR = &radius
	return opt
}

// RadiusBL sets the bottom-left corner radius.
func (opt *PDFRectOpt) RadiusBL(radius float64) *PDFRectOpt {
	opt.radiusBL = &radius
	return opt
}

// RadiusBR sets the bottom-right corner radius.
func (opt *PDFRectOpt) RadiusBR(radius float64) *PDFRectOpt {
	opt.radiusBR = &radius
	return opt
}
|  |  | ||||||
|  | func (b *WPDFBuilder) Rect(w float64, h float64, styleStr PDFRectStyle, opts ...*PDFRectOpt) { | ||||||
|  | 	x := b.GetX() | ||||||
|  | 	y := b.GetY() | ||||||
|  | 	var lineWidth *float64 | ||||||
|  | 	var drawColor *PDFColor | ||||||
|  | 	var fillColor *PDFColor | ||||||
|  | 	radiusTL := float64(0) | ||||||
|  | 	radiusTR := float64(0) | ||||||
|  | 	radiusBR := float64(0) | ||||||
|  | 	radiusBL := float64(0) | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		x = langext.Coalesce(opt.x, x) | ||||||
|  | 		y = langext.Coalesce(opt.y, y) | ||||||
|  | 		lineWidth = langext.CoalesceOpt(opt.lineWidth, lineWidth) | ||||||
|  | 		drawColor = langext.CoalesceOpt(opt.drawColor, drawColor) | ||||||
|  | 		fillColor = langext.CoalesceOpt(opt.fillColor, fillColor) | ||||||
|  | 		radiusTL = langext.Coalesce(opt.radiusTL, radiusTL) | ||||||
|  | 		radiusTR = langext.Coalesce(opt.radiusTR, radiusTR) | ||||||
|  | 		radiusBR = langext.Coalesce(opt.radiusBR, radiusBR) | ||||||
|  | 		radiusBL = langext.Coalesce(opt.radiusBL, radiusBL) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if lineWidth != nil { | ||||||
|  | 		old := b.GetLineWidth() | ||||||
|  | 		b.SetLineWidth(*lineWidth) | ||||||
|  | 		defer func() { b.SetLineWidth(old) }() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if drawColor != nil { | ||||||
|  | 		oldR, oldG, oldB := b.GetDrawColor() | ||||||
|  | 		b.SetDrawColor(drawColor.R, drawColor.G, drawColor.B) | ||||||
|  | 		defer func() { b.SetDrawColor(oldR, oldG, oldB) }() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if fillColor != nil { | ||||||
|  | 		oldR, oldG, oldB := b.GetFillColor() | ||||||
|  | 		b.SetFillColor(fillColor.R, fillColor.G, fillColor.B) | ||||||
|  | 		defer func() { b.SetFillColor(oldR, oldG, oldB) }() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	b.b.RoundedRectExt(x, y, w, h, radiusTL, radiusTR, radiusBR, radiusBL, string(styleStr)) | ||||||
|  | } | ||||||
		Reference in New Issue
	
	Block a user