Compare commits
	
		
			81 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| a4a8c83d17 | |||
| e952176bb0 | |||
| d99adb203b | |||
| f1f91f4cfa | |||
| 2afb265ea4 | |||
| be24f7a190 | |||
| aae8a706e9 | |||
| 7d64f18f54 | |||
| d08b2e565a | |||
| d29e84894d | |||
| 617298c366 | |||
| 668f308565 | |||
| 240a8ed7aa | |||
| 70de8e8d04 | |||
| d38fa60fbc | |||
| 5fba7e0e2f | |||
| 8757643399 | |||
| 42bd4cf58d | |||
| 413178e2d3 | |||
| 9264a2e99b | |||
| 2a0471fb3d | |||
| 1497c013f9 | |||
| ef78b7467b | |||
| 0eda32b725 | |||
| f9ccafb976 | |||
| 6e90239fef | |||
| 05580c384a | |||
| 3188b951fb | |||
| 6b211d1443 | |||
| b2b9b40792 | |||
| 2f915cb6c1 | |||
| b2b93f570a | |||
| 8247fc4524 | |||
| 5dad44ad09 | |||
| f042183433 | |||
| b0be93a7a0 | |||
| 1c143921e6 | |||
| 68e63a9cf6 | |||
| c3162fec95 | |||
| 1124aa781a | |||
| eef0e9f2aa | |||
| af38b06d22 | |||
| 2fad6340c7 | |||
| 03aa0a2282 | |||
| 358c238f3d | |||
| d65ac8ba2b | |||
| 55d02b8c65 | |||
| 8a3965f666 | |||
| 4aa2f494b1 | |||
| 8f13eb2f16 | |||
| 8f15d42173 | |||
| 07fa21dcca | |||
| e657de7f78 | |||
| c534e998e8 | |||
| 88642770c5 | |||
| 8528b5cb66 | |||
| 5ba84bd8ee | |||
| 1260b2dc77 | |||
| 7d18b913c6 | |||
| d1f9069f2f | |||
| fa6d73301e | |||
| bfe62799d3 | |||
| ede912eb7b | |||
| ff8f128fe8 | |||
| 1971f1396f | |||
| bf6c184d12 | |||
| 770f5c5c64 | |||
| 623c021689 | |||
| afcc89bf9e | |||
| 1672e8f8fd | |||
| 398ed56d32 | |||
| f3ecba3883 | |||
| 45031b05cf | |||
| 7413ea045d | |||
| 62c9a4e734 | |||
| 3a8baaa6d9 | |||
| 498785e213 | |||
| 678f95642c | |||
| dacc97e2ce | |||
| f8c0c0afa0 | |||
| 2fbd5cf965 | 
| @@ -6,7 +6,12 @@ | |||||||
| name: Build Docker and Deploy | name: Build Docker and Deploy | ||||||
| run-name: Build & Deploy ${{ gitea.ref }} on ${{ gitea.actor }} | run-name: Build & Deploy ${{ gitea.ref }} on ${{ gitea.actor }} | ||||||
|  |  | ||||||
| on: [push] | on: | ||||||
|  |   push: | ||||||
|  |     branches: | ||||||
|  |       - '*' | ||||||
|  |       - '**' | ||||||
|  |  | ||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   run_tests: |   run_tests: | ||||||
| @@ -34,3 +39,17 @@ jobs: | |||||||
|       - name: Run tests |       - name: Run tests | ||||||
|         run: cd "${{ gitea.workspace }}" && make test |         run: cd "${{ gitea.workspace }}" && make test | ||||||
|  |  | ||||||
|  |       - name: Send failure mail | ||||||
|  |         if: failure() | ||||||
|  |         uses: dawidd6/action-send-mail@v3 | ||||||
|  |         with: | ||||||
|  |           server_address: smtp.fastmail.com | ||||||
|  |           server_port: 465 | ||||||
|  |           secure: true | ||||||
|  |           username: ${{secrets.MAIL_USERNAME}} | ||||||
|  |           password: ${{secrets.MAIL_PASSWORD}} | ||||||
|  |           subject: Pipeline on '${{ gitea.repository }}' failed | ||||||
|  |           to: ${{ steps.commiter_info.outputs.MAIL }} | ||||||
|  |           from: Gitea Actions <gitea_actions@blackforestbytes.de> | ||||||
|  |           body: "Go to https://gogs.blackforestbytes.com/${{ gitea.repository }}/actions" | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										6
									
								
								.idea/golinter.xml
									
									
									
										generated
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								.idea/golinter.xml
									
									
									
										generated
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | |||||||
|  | <?xml version="1.0" encoding="UTF-8"?> | ||||||
|  | <project version="4"> | ||||||
|  |   <component name="GoLinterSettings"> | ||||||
|  |     <option name="checkGoLinterExe" value="false" /> | ||||||
|  |   </component> | ||||||
|  | </project> | ||||||
							
								
								
									
										6
									
								
								.idea/sqldialects.xml
									
									
									
										generated
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								.idea/sqldialects.xml
									
									
									
										generated
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | |||||||
|  | <?xml version="1.0" encoding="UTF-8"?> | ||||||
|  | <project version="4"> | ||||||
|  |   <component name="SqlDialectMappings"> | ||||||
|  |     <file url="file://$PROJECT_DIR$/sq/sq_test.go" dialect="SQLite" /> | ||||||
|  |   </component> | ||||||
|  | </project> | ||||||
| @@ -23,6 +23,7 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"` | |||||||
| |             |            |                                                                                                               | | |             |            |                                                                                                               | | ||||||
| | mongoext    | Mike       | Utility/Helper functions for mongodb                                                                          | | | mongoext    | Mike       | Utility/Helper functions for mongodb                                                                          | | ||||||
| | cursortoken | Mike       | MongoDB cursortoken implementation                                                                            | | | cursortoken | Mike       | MongoDB cursortoken implementation                                                                            | | ||||||
|  | | pagination  | Mike       | Pagination implementation                                                                                     | | ||||||
| |             |            |                                                                                                               | | |             |            |                                                                                                               | | ||||||
| | totpext     | Mike       | Implementation of TOTP (2-Factor-Auth)                                                                        | | | totpext     | Mike       | Implementation of TOTP (2-Factor-Auth)                                                                        | | ||||||
| | termext     | Mike       | Utilities for terminals (mostly color output)                                                                 | | | termext     | Mike       | Utilities for terminals (mostly color output)                                                                 | | ||||||
| @@ -40,3 +41,5 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"` | |||||||
| | rext        | Mike       | Regex Wrapper, wraps regexp with a better interface                                                           | | | rext        | Mike       | Regex Wrapper, wraps regexp with a better interface                                                           | | ||||||
| | wmo         | Mike       | Mongo Wrapper, wraps mongodb with a better interface                                                          | | | wmo         | Mike       | Mongo Wrapper, wraps mongodb with a better interface                                                          | | ||||||
| |             |            |                                                                                                               | | |             |            |                                                                                                               | | ||||||
|  | | scn         | Mike       | SimpleCloudNotifier                                                                                           | | ||||||
|  | |             |            |                                                                                                               | | ||||||
							
								
								
									
										
											BIN
										
									
								
								bfcodegen/_test_example_2.tgz
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								bfcodegen/_test_example_2.tgz
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| @@ -5,8 +5,8 @@ import ( | |||||||
| 	_ "embed" | 	_ "embed" | ||||||
| 	"errors" | 	"errors" | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  | 	"go/format" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext" | 	"gogs.mikescher.com/BlackForestBytes/goext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
| @@ -17,7 +17,6 @@ import ( | |||||||
| 	"regexp" | 	"regexp" | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"text/template" | 	"text/template" | ||||||
| 	"time" |  | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type CSIDDef struct { | type CSIDDef struct { | ||||||
| @@ -71,9 +70,9 @@ func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | |||||||
| 	newChecksum := cryptext.BytesSha256([]byte(newChecksumStr)) | 	newChecksum := cryptext.BytesSha256([]byte(newChecksumStr)) | ||||||
|  |  | ||||||
| 	if newChecksum != oldChecksum { | 	if newChecksum != oldChecksum { | ||||||
| 		fmt.Printf("[IDGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum) | 		fmt.Printf("[CSIDGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum) | ||||||
| 	} else { | 	} else { | ||||||
| 		fmt.Printf("[IDGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum) | 		fmt.Printf("[CSIDGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum) | ||||||
| 		return nil | 		return nil | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -101,25 +100,16 @@ func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | |||||||
| 		return errors.New("no package name found in any file") | 		return errors.New("no package name found in any file") | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	err = os.WriteFile(destFile, []byte(fmtCSIDOutput(newChecksum, allIDs, pkgname)), 0o755) | 	fdata, err := format.Source([]byte(fmtCSIDOutput(newChecksum, allIDs, pkgname))) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second)) | 	err = os.WriteFile(destFile, fdata, 0o755) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if res.CommandTimedOut { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt timed out") |  | ||||||
| 	} |  | ||||||
| 	if res.ExitCode != 0 { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt did not succeed") |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|   | |||||||
| @@ -3,6 +3,7 @@ | |||||||
| package {{.PkgName}} | package {{.PkgName}} | ||||||
|  |  | ||||||
| import "crypto/rand" | import "crypto/rand" | ||||||
|  | import "crypto/sha256" | ||||||
| import "fmt" | import "fmt" | ||||||
| import "github.com/go-playground/validator/v10" | import "github.com/go-playground/validator/v10" | ||||||
| import "github.com/rs/zerolog/log" | import "github.com/rs/zerolog/log" | ||||||
| @@ -65,6 +66,27 @@ func generateID(prefix string) string { | |||||||
| 	return prefix + k + checkstr | 	return prefix + k + checkstr | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func generateIDFromSeed(prefix string, seed string) string { | ||||||
|  | 	h := sha256.New() | ||||||
|  |  | ||||||
|  | 	iddata := "" | ||||||
|  | 	for len(iddata) < idlen-len(prefix)-checklen { | ||||||
|  | 		h.Write([]byte(seed)) | ||||||
|  | 		bs := h.Sum(nil) | ||||||
|  | 		iddata += langext.NewAnyBaseConverter(idCharset).Encode(bs) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	checksum := 0 | ||||||
|  | 	for i := 0; i < idlen-len(prefix)-checklen; i++ { | ||||||
|  | 		ichr := int(iddata[i]) | ||||||
|  | 		checksum = (checksum + charSetReverseMap[ichr]) % (idCharsetLen) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	checkstr := string(idCharset[checksum%idCharsetLen]) | ||||||
|  |  | ||||||
|  | 	return prefix + iddata[:(idlen-len(prefix)-checklen)] + checkstr | ||||||
|  | } | ||||||
|  |  | ||||||
| func validateID(prefix string, value string) error { | func validateID(prefix string, value string) error { | ||||||
| 	if len(value) != idlen { | 	if len(value) != idlen { | ||||||
| 		return exerr.New(exerr.TypeInvalidCSID, "id has the wrong length").Str("value", value).Build() | 		return exerr.New(exerr.TypeInvalidCSID, "id has the wrong length").Str("value", value).Build() | ||||||
|   | |||||||
| @@ -12,8 +12,8 @@ import ( | |||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| //go:embed _test_example.tgz | //go:embed _test_example_1.tgz | ||||||
| var CSIDExampleModels []byte | var CSIDExampleModels1 []byte | ||||||
|  |  | ||||||
| func TestGenerateCSIDSpecs(t *testing.T) { | func TestGenerateCSIDSpecs(t *testing.T) { | ||||||
|  |  | ||||||
| @@ -21,7 +21,7 @@ func TestGenerateCSIDSpecs(t *testing.T) { | |||||||
|  |  | ||||||
| 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | ||||||
|  |  | ||||||
| 	err := os.WriteFile(tmpFile, CSIDExampleModels, 0o777) | 	err := os.WriteFile(tmpFile, CSIDExampleModels1, 0o777) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | ||||||
|   | |||||||
| @@ -3,10 +3,11 @@ package bfcodegen | |||||||
| import ( | import ( | ||||||
| 	"bytes" | 	"bytes" | ||||||
| 	_ "embed" | 	_ "embed" | ||||||
|  | 	"encoding/json" | ||||||
| 	"errors" | 	"errors" | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  | 	"go/format" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext" | 	"gogs.mikescher.com/BlackForestBytes/goext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
| @@ -14,16 +15,18 @@ import ( | |||||||
| 	"os" | 	"os" | ||||||
| 	"path" | 	"path" | ||||||
| 	"path/filepath" | 	"path/filepath" | ||||||
|  | 	"reflect" | ||||||
| 	"regexp" | 	"regexp" | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"text/template" | 	"text/template" | ||||||
| 	"time" |  | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type EnumDefVal struct { | type EnumDefVal struct { | ||||||
| 	VarName     string | 	VarName     string | ||||||
| 	Value       string | 	Value       string | ||||||
| 	Description *string | 	Description *string | ||||||
|  | 	Data        *map[string]any | ||||||
|  | 	RawComment  *string | ||||||
| } | } | ||||||
|  |  | ||||||
| type EnumDef struct { | type EnumDef struct { | ||||||
| @@ -38,7 +41,7 @@ var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_] | |||||||
|  |  | ||||||
| var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | ||||||
|  |  | ||||||
| var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s]+"|[0-9]+))\s*(//(?P<descr>.*))?.*$`)) | var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-.]+"|[0-9]+))\s*(//(?P<comm>.*))?.*$`)) | ||||||
|  |  | ||||||
| var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | ||||||
|  |  | ||||||
| @@ -47,11 +50,6 @@ var templateEnumGenerateText string | |||||||
|  |  | ||||||
| func GenerateEnumSpecs(sourceDir string, destFile string) error { | func GenerateEnumSpecs(sourceDir string, destFile string) error { | ||||||
|  |  | ||||||
| 	files, err := os.ReadDir(sourceDir) |  | ||||||
| 	if err != nil { |  | ||||||
| 		return err |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	oldChecksum := "N/A" | 	oldChecksum := "N/A" | ||||||
| 	if _, err := os.Stat(destFile); !os.IsNotExist(err) { | 	if _, err := os.Stat(destFile); !os.IsNotExist(err) { | ||||||
| 		content, err := os.ReadFile(destFile) | 		content, err := os.ReadFile(destFile) | ||||||
| @@ -63,6 +61,30 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	gocode, _, changed, err := _generateEnumSpecs(sourceDir, destFile, oldChecksum, true) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if !changed { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = os.WriteFile(destFile, []byte(gocode), 0o755) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, gofmt bool) (string, string, bool, error) { | ||||||
|  |  | ||||||
|  | 	files, err := os.ReadDir(sourceDir) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", "", false, err | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) }) | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) }) | ||||||
| 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") }) | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") }) | ||||||
| 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") }) | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") }) | ||||||
| @@ -72,7 +94,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 	for _, f := range files { | 	for _, f := range files { | ||||||
| 		content, err := os.ReadFile(path.Join(sourceDir, f.Name())) | 		content, err := os.ReadFile(path.Join(sourceDir, f.Name())) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return err | 			return "", "", false, err | ||||||
| 		} | 		} | ||||||
| 		newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content) | 		newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content) | ||||||
| 	} | 	} | ||||||
| @@ -83,7 +105,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 		fmt.Printf("[EnumGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum) | 		fmt.Printf("[EnumGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum) | ||||||
| 	} else { | 	} else { | ||||||
| 		fmt.Printf("[EnumGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum) | 		fmt.Printf("[EnumGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum) | ||||||
| 		return nil | 		return "", oldChecksum, false, nil | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	allEnums := make([]EnumDef, 0) | 	allEnums := make([]EnumDef, 0) | ||||||
| @@ -94,7 +116,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 		fmt.Printf("========= %s =========\n\n", f.Name()) | 		fmt.Printf("========= %s =========\n\n", f.Name()) | ||||||
| 		fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name())) | 		fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name())) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return err | 			return "", "", false, err | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		fmt.Printf("\n") | 		fmt.Printf("\n") | ||||||
| @@ -107,29 +129,21 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if pkgname == "" { | 	if pkgname == "" { | ||||||
| 		return errors.New("no package name found in any file") | 		return "", "", false, errors.New("no package name found in any file") | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	err = os.WriteFile(destFile, []byte(fmtEnumOutput(newChecksum, allEnums, pkgname)), 0o755) | 	rdata := fmtEnumOutput(newChecksum, allEnums, pkgname) | ||||||
|  |  | ||||||
|  | 	if !gofmt { | ||||||
|  | 		return rdata, newChecksum, true, nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	fdata, err := format.Source([]byte(rdata)) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return "", "", false, err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second)) | 	return string(fdata), newChecksum, true, nil | ||||||
| 	if err != nil { |  | ||||||
| 		return err |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	if res.CommandTimedOut { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt timed out") |  | ||||||
| 	} |  | ||||||
| 	if res.ExitCode != 0 { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt did not succeed") |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return nil |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | ||||||
| @@ -181,10 +195,34 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | |||||||
|  |  | ||||||
| 		if match, ok := rexEnumValueDef.MatchFirst(line); ok { | 		if match, ok := rexEnumValueDef.MatchFirst(line); ok { | ||||||
| 			typename := match.GroupByName("type").Value() | 			typename := match.GroupByName("type").Value() | ||||||
|  |  | ||||||
|  | 			comment := match.GroupByNameOrEmpty("comm").ValueOrNil() | ||||||
|  | 			var descr *string = nil | ||||||
|  | 			var data *map[string]any = nil | ||||||
|  | 			if comment != nil { | ||||||
|  | 				comment = langext.Ptr(strings.TrimSpace(*comment)) | ||||||
|  | 				if strings.HasPrefix(*comment, "{") { | ||||||
|  | 					if v, ok := tryParseDataComment(*comment); ok { | ||||||
|  | 						data = &v | ||||||
|  | 						if anyDataDescr, ok := v["description"]; ok { | ||||||
|  | 							if dataDescr, ok := anyDataDescr.(string); ok { | ||||||
|  | 								descr = &dataDescr | ||||||
|  | 							} | ||||||
|  | 						} | ||||||
|  | 					} else { | ||||||
|  | 						descr = comment | ||||||
|  | 					} | ||||||
|  | 				} else { | ||||||
|  | 					descr = comment | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  |  | ||||||
| 			def := EnumDefVal{ | 			def := EnumDefVal{ | ||||||
| 				VarName:     match.GroupByName("name").Value(), | 				VarName:     match.GroupByName("name").Value(), | ||||||
| 				Value:       match.GroupByName("value").Value(), | 				Value:       match.GroupByName("value").Value(), | ||||||
| 				Description: match.GroupByNameOrEmpty("descr").ValueOrNil(), | 				RawComment:  comment, | ||||||
|  | 				Description: descr, | ||||||
|  | 				Data:        data, | ||||||
| 			} | 			} | ||||||
|  |  | ||||||
| 			found := false | 			found := false | ||||||
| @@ -209,6 +247,41 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | |||||||
| 	return enums, pkgname, nil | 	return enums, pkgname, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func tryParseDataComment(s string) (map[string]any, bool) { | ||||||
|  |  | ||||||
|  | 	r := make(map[string]any) | ||||||
|  |  | ||||||
|  | 	err := json.Unmarshal([]byte(s), &r) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, false | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for _, v := range r { | ||||||
|  |  | ||||||
|  | 		rv := reflect.ValueOf(v) | ||||||
|  |  | ||||||
|  | 		if rv.Kind() == reflect.Ptr && rv.IsNil() { | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  | 		if rv.Kind() == reflect.Bool { | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  | 		if rv.Kind() == reflect.String { | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  | 		if rv.Kind() == reflect.Int64 { | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  | 		if rv.Kind() == reflect.Float64 { | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		return nil, false | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return r, true | ||||||
|  | } | ||||||
|  |  | ||||||
| func fmtEnumOutput(cs string, enums []EnumDef, pkgname string) string { | func fmtEnumOutput(cs string, enums []EnumDef, pkgname string) string { | ||||||
|  |  | ||||||
| 	templ := template.New("enum-generate") | 	templ := template.New("enum-generate") | ||||||
| @@ -221,6 +294,47 @@ func fmtEnumOutput(cs string, enums []EnumDef, pkgname string) string { | |||||||
| 		"hasDescr": func(v EnumDef) bool { | 		"hasDescr": func(v EnumDef) bool { | ||||||
| 			return langext.ArrAll(v.Values, func(val EnumDefVal) bool { return val.Description != nil }) | 			return langext.ArrAll(v.Values, func(val EnumDefVal) bool { return val.Description != nil }) | ||||||
| 		}, | 		}, | ||||||
|  | 		"hasData": func(v EnumDef) bool { | ||||||
|  | 			return len(v.Values) > 0 && langext.ArrAll(v.Values, func(val EnumDefVal) bool { return val.Data != nil }) | ||||||
|  | 		}, | ||||||
|  | 		"gostr": func(v any) string { | ||||||
|  | 			return fmt.Sprintf("%#+v", v) | ||||||
|  | 		}, | ||||||
|  | 		"goobj": func(name string, v any) string { | ||||||
|  | 			return fmt.Sprintf("%#+v", v) | ||||||
|  | 		}, | ||||||
|  | 		"godatakey": func(v string) string { | ||||||
|  | 			return strings.ToUpper(v[0:1]) + v[1:] | ||||||
|  | 		}, | ||||||
|  | 		"godatavalue": func(v any) string { | ||||||
|  | 			return fmt.Sprintf("%#+v", v) | ||||||
|  | 		}, | ||||||
|  | 		"godatatype": func(v any) string { | ||||||
|  | 			return fmt.Sprintf("%T", v) | ||||||
|  | 		}, | ||||||
|  | 		"mapindex": func(v map[string]any, k string) any { | ||||||
|  | 			return v[k] | ||||||
|  | 		}, | ||||||
|  | 		"generalDataKeys": func(v EnumDef) map[string]string { | ||||||
|  | 			r0 := make(map[string]int) | ||||||
|  |  | ||||||
|  | 			for _, eval := range v.Values { | ||||||
|  | 				for k := range *eval.Data { | ||||||
|  | 					if ctr, ok := r0[k]; ok { | ||||||
|  | 						r0[k] = ctr + 1 | ||||||
|  | 					} else { | ||||||
|  | 						r0[k] = 1 | ||||||
|  | 					} | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			r1 := langext.MapToArr(r0) | ||||||
|  | 			r2 := langext.ArrFilter(r1, func(p langext.MapEntry[string, int]) bool { return p.Value == len(v.Values) }) | ||||||
|  | 			r3 := langext.ArrMap(r2, func(p langext.MapEntry[string, int]) string { return p.Key }) | ||||||
|  | 			r4 := langext.ArrToKVMap(r3, func(p string) string { return p }, func(p string) string { return fmt.Sprintf("%T", (*v.Values[0].Data)[p]) }) | ||||||
|  |  | ||||||
|  | 			return r4 | ||||||
|  | 		}, | ||||||
| 	}) | 	}) | ||||||
|  |  | ||||||
| 	templ = template.Must(templ.Parse(templateEnumGenerateText)) | 	templ = template.Must(templ.Parse(templateEnumGenerateText)) | ||||||
|   | |||||||
| @@ -11,21 +11,38 @@ const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | |||||||
|  |  | ||||||
| {{ $hasStr   := ( . | hasStr   ) }} | {{ $hasStr   := ( . | hasStr   ) }} | ||||||
| {{ $hasDescr := ( . | hasDescr ) }} | {{ $hasDescr := ( . | hasDescr ) }} | ||||||
|  | {{ $hasData  := ( . | hasData  ) }} | ||||||
|  |  | ||||||
| // ================================ {{.EnumTypeName}} ================================ | // ================================ {{.EnumTypeName}} ================================ | ||||||
| // | // | ||||||
| // File:       {{.FileRelative}} | // File:       {{.FileRelative}} | ||||||
| // StringEnum: {{$hasStr   | boolToStr}} | // StringEnum: {{$hasStr   | boolToStr}} | ||||||
| // DescrEnum:  {{$hasDescr | boolToStr}} | // DescrEnum:  {{$hasDescr | boolToStr}} | ||||||
|  | // DataEnum:   {{$hasData  | boolToStr}} | ||||||
| // | // | ||||||
|  |  | ||||||
|  | {{ $typename := .EnumTypeName }} | ||||||
|  | {{ $enumdef  := . }} | ||||||
|  |  | ||||||
| var __{{.EnumTypeName}}Values = []{{.EnumTypeName}}{ {{range .Values}} | var __{{.EnumTypeName}}Values = []{{.EnumTypeName}}{ {{range .Values}} | ||||||
| 	{{.VarName}}, {{end}} | 	{{.VarName}}, {{end}} | ||||||
| } | } | ||||||
|  |  | ||||||
| {{if $hasDescr}} | {{if $hasDescr}} | ||||||
| var __{{.EnumTypeName}}Descriptions = map[{{.EnumTypeName}}]string{ {{range .Values}} | var __{{.EnumTypeName}}Descriptions = map[{{.EnumTypeName}}]string{ {{range .Values}} | ||||||
| 	{{.VarName}}: "{{.Description | deref | trimSpace}}", {{end}} | 	{{.VarName}}: {{.Description | deref | trimSpace | gostr}}, {{end}} | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | {{if $hasData}} | ||||||
|  | type {{ .EnumTypeName }}Data struct { {{ range $datakey, $datatype := ($enumdef | generalDataKeys)  }} | ||||||
|  |     {{ $datakey | godatakey }} {{ $datatype }} `json:"{{ $datakey }}"` {{ end }} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | var __{{.EnumTypeName}}Data = map[{{.EnumTypeName}}]{{.EnumTypeName}}Data{ {{range .Values}} {{ $enumvalue := . }} | ||||||
|  | 	{{.VarName}}: {{ $typename }}Data{ {{ range $datakey, $datatype := $enumdef | generalDataKeys  }} | ||||||
|  | 	    {{ $datakey | godatakey }}: {{ (mapindex $enumvalue.Data $datakey) | godatavalue }}, {{ end }} | ||||||
|  | 	}, {{end}} | ||||||
| } | } | ||||||
| {{end}} | {{end}} | ||||||
|  |  | ||||||
| @@ -64,6 +81,15 @@ func (e {{.EnumTypeName}}) Description() string { | |||||||
| } | } | ||||||
| {{end}} | {{end}} | ||||||
|  |  | ||||||
|  | {{if $hasData}} | ||||||
|  | func (e {{.EnumTypeName}}) Data() {{.EnumTypeName}}Data { | ||||||
|  | 	if d, ok := __{{.EnumTypeName}}Data[e]; ok { | ||||||
|  | 		return d | ||||||
|  | 	} | ||||||
|  | 	return {{.EnumTypeName}}Data{} | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
| func (e {{.EnumTypeName}}) VarName() string { | func (e {{.EnumTypeName}}) VarName() string { | ||||||
| 	if d, ok := __{{.EnumTypeName}}Varnames[e]; ok { | 	if d, ok := __{{.EnumTypeName}}Varnames[e]; ok { | ||||||
| 		return d | 		return d | ||||||
| @@ -75,6 +101,12 @@ func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue { | |||||||
|     {{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}} |     {{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | {{if $hasDescr}} | ||||||
|  | func (e {{.EnumTypeName}}) DescriptionMeta() enums.EnumDescriptionMetaValue { | ||||||
|  |     return enums.EnumDescriptionMetaValue{VarName: e.VarName(), Value: e, Description: e.Description()} | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
| func Parse{{.EnumTypeName}}(vv string) ({{.EnumTypeName}}, bool) { | func Parse{{.EnumTypeName}}(vv string) ({{.EnumTypeName}}, bool) { | ||||||
| 	for _, ev := range __{{.EnumTypeName}}Values { | 	for _, ev := range __{{.EnumTypeName}}Values { | ||||||
| 		if string(ev) == vv { | 		if string(ev) == vv { | ||||||
| @@ -94,4 +126,12 @@ func {{.EnumTypeName}}ValuesMeta() []enums.EnumMetaValue { | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | {{if $hasDescr}} | ||||||
|  | func {{.EnumTypeName}}ValuesDescriptionMeta() []enums.EnumDescriptionMetaValue { | ||||||
|  | 	return []enums.EnumDescriptionMetaValue{ {{range .Values}} | ||||||
|  |             {{.VarName}}.DescriptionMeta(), {{end}} | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
| {{end}} | {{end}} | ||||||
| @@ -12,8 +12,11 @@ import ( | |||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| //go:embed _test_example.tgz | //go:embed _test_example_1.tgz | ||||||
| var EnumExampleModels []byte | var EnumExampleModels1 []byte | ||||||
|  |  | ||||||
|  | //go:embed _test_example_2.tgz | ||||||
|  | var EnumExampleModels2 []byte | ||||||
|  |  | ||||||
| func TestGenerateEnumSpecs(t *testing.T) { | func TestGenerateEnumSpecs(t *testing.T) { | ||||||
|  |  | ||||||
| @@ -21,7 +24,7 @@ func TestGenerateEnumSpecs(t *testing.T) { | |||||||
|  |  | ||||||
| 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | ||||||
|  |  | ||||||
| 	err := os.WriteFile(tmpFile, EnumExampleModels, 0o777) | 	err := os.WriteFile(tmpFile, EnumExampleModels1, 0o777) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | ||||||
| @@ -34,17 +37,53 @@ func TestGenerateEnumSpecs(t *testing.T) { | |||||||
| 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	err = GenerateEnumSpecs(tmpDir, tmpDir+"/enums_gen.go") | 	s1, cs1, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	err = GenerateEnumSpecs(tmpDir, tmpDir+"/enums_gen.go") | 	s2, cs2, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, cs1, cs2) | ||||||
|  | 	tst.AssertEqual(t, s1, s2) | ||||||
|  |  | ||||||
| 	fmt.Println() | 	fmt.Println() | ||||||
| 	fmt.Println() | 	fmt.Println() | ||||||
| 	fmt.Println() | 	fmt.Println() | ||||||
| 	fmt.Println("=====================================================================================================") | 	fmt.Println("=====================================================================================================") | ||||||
| 	fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/enums_gen.go"))(t))) | 	fmt.Println(s1) | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestGenerateEnumSpecsData(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz") | ||||||
|  |  | ||||||
|  | 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | ||||||
|  |  | ||||||
|  | 	err := os.WriteFile(tmpFile, EnumExampleModels2, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	err = os.Mkdir(tmpDir, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	s1, _, _, err := _generateEnumSpecs(tmpDir, "", "", true) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println(s1) | ||||||
| 	fmt.Println("=====================================================================================================") | 	fmt.Println("=====================================================================================================") | ||||||
| 	fmt.Println() | 	fmt.Println() | ||||||
| 	fmt.Println() | 	fmt.Println() | ||||||
|   | |||||||
| @@ -5,8 +5,8 @@ import ( | |||||||
| 	_ "embed" | 	_ "embed" | ||||||
| 	"errors" | 	"errors" | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  | 	"go/format" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext" | 	"gogs.mikescher.com/BlackForestBytes/goext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
| @@ -17,7 +17,6 @@ import ( | |||||||
| 	"regexp" | 	"regexp" | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"text/template" | 	"text/template" | ||||||
| 	"time" |  | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type IDDef struct { | type IDDef struct { | ||||||
| @@ -100,25 +99,16 @@ func GenerateIDSpecs(sourceDir string, destFile string) error { | |||||||
| 		return errors.New("no package name found in any file") | 		return errors.New("no package name found in any file") | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	err = os.WriteFile(destFile, []byte(fmtIDOutput(newChecksum, allIDs, pkgname)), 0o755) | 	fdata, err := format.Source([]byte(fmtIDOutput(newChecksum, allIDs, pkgname))) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second)) | 	err = os.WriteFile(destFile, fdata, 0o755) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if res.CommandTimedOut { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt timed out") |  | ||||||
| 	} |  | ||||||
| 	if res.ExitCode != 0 { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt did not succeed") |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|   | |||||||
| @@ -12,8 +12,8 @@ import ( | |||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| //go:embed _test_example.tgz | //go:embed _test_example_1.tgz | ||||||
| var IDExampleModels []byte | var IDExampleModels1 []byte | ||||||
|  |  | ||||||
| func TestGenerateIDSpecs(t *testing.T) { | func TestGenerateIDSpecs(t *testing.T) { | ||||||
|  |  | ||||||
| @@ -21,7 +21,7 @@ func TestGenerateIDSpecs(t *testing.T) { | |||||||
|  |  | ||||||
| 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | ||||||
|  |  | ||||||
| 	err := os.WriteFile(tmpFile, IDExampleModels, 0o777) | 	err := os.WriteFile(tmpFile, IDExampleModels1, 0o777) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | ||||||
|   | |||||||
| @@ -66,7 +66,6 @@ func (ph PassHash) Data() (_version int, _seed []byte, _payload []byte, _totp bo | |||||||
| 		return int(version), nil, payload, false, nil, true | 		return int(version), nil, payload, false, nil, true | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	// |  | ||||||
| 	if version == 2 { | 	if version == 2 { | ||||||
| 		if len(split) != 3 { | 		if len(split) != 3 { | ||||||
| 			return -1, nil, nil, false, nil, false | 			return -1, nil, nil, false, nil, false | ||||||
|   | |||||||
							
								
								
									
										263
									
								
								cryptext/pronouncablePassword.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										263
									
								
								cryptext/pronouncablePassword.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,263 @@ | |||||||
|  | package cryptext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"crypto/rand" | ||||||
|  | 	"io" | ||||||
|  | 	"math/big" | ||||||
|  | 	mathrand "math/rand" | ||||||
|  | 	"strings" | ||||||
|  | ) | ||||||
|  |  | ||||||
const (
	ppStartChar            = "BCDFGHJKLMNPQRSTVWXZ" // characters allowed at the start of the password / of a new segment
	ppEndChar              = "ABDEFIKMNORSTUXYZ"    // characters allowed to end a segment (or the whole password)
	ppVowel                = "AEIOUY"               // characters treated as vowels by the run-length rules
	ppConsonant            = "BCDFGHJKLMNPQRSTVWXZ" // characters treated as consonants by the run-length rules
	ppSegmentLenMin        = 3                      // minimum randomized target length of a segment
	ppSegmentLenMax        = 7                      // upper bound used when randomizing the segment target length
	ppMaxRepeatedVowel     = 2                      // maximum run of consecutive vowels
	ppMaxRepeatedConsonant = 2                      // maximum run of consecutive consonants
)
|  |  | ||||||
// ppContinuation maps each character to the set of characters that may
// directly follow it — a hand-crafted pronounceability table acting as the
// transition table of the pseudo markov-chain used by PronouncablePasswordExt.
var ppContinuation = map[uint8]string{
	'A': "BCDFGHJKLMNPRSTVWXYZ",
	'B': "ADFIKLMNORSTUY",
	'C': "AEIKOUY",
	'D': "AEILORSUYZ",
	'E': "BCDFGHJKLMNPRSTVWXYZ",
	'F': "ADEGIKLOPRTUY",
	'G': "ABDEFHILMNORSTUY",
	'H': "AEIOUY",
	'I': "BCDFGHJKLMNPRSTVWXZ",
	'J': "AEIOUY",
	'K': "ADEFHILMNORSTUY",
	'L': "ADEFGIJKMNOPSTUVWYZ",
	'M': "ABEFIKOPSTUY",
	'N': "ABEFIKOPSTUY",
	'O': "BCDFGHJKLMNPRSTVWXYZ",
	'P': "AEFIJLORSTUY",
	'Q': "AEIOUY",
	'R': "ADEFGHIJKLMNOPSTUVYZ",
	'S': "ACDEIKLOPTUYZ",
	'T': "AEHIJOPRSUWY",
	'U': "BCDFGHJKLMNPRSTVWXZ",
	'V': "AEIOUY",
	'W': "AEIOUY",
	'X': "AEIOUY",
	'Y': "ABCDFGHKLMNPRSTVXZ",
	'Z': "AEILOTUY",
}
|  |  | ||||||
// ppLog2Map is a precomputed lookup of log2(n) for n in [1..32].
// It is used by ppRand to accumulate the entropy (in bits) contributed by
// each random pick from a charset of the given size, without calling math.Log2.
var ppLog2Map = map[int]float64{
	1:  0.00000000,
	2:  1.00000000,
	3:  1.58496250,
	4:  2.00000000,
	5:  2.32192809,
	6:  2.58496250,
	7:  2.80735492,
	8:  3.00000000,
	9:  3.16992500,
	10: 3.32192809,
	11: 3.45943162,
	12: 3.58496250,
	13: 3.70043972,
	14: 3.80735492,
	15: 3.90689060,
	16: 4.00000000,
	17: 4.08746284,
	18: 4.16992500,
	19: 4.24792751,
	20: 4.32192809,
	21: 4.39231742,
	22: 4.45943162,
	23: 4.52356196,
	24: 4.58496250,
	25: 4.64385619,
	26: 4.70043972,
	27: 4.75488750,
	28: 4.80735492,
	29: 4.85798100,
	30: 4.90689060,
	31: 4.95419631,
	32: 5.00000000,
}
|  |  | ||||||
var (
	ppVowelMap     = ppMakeSet(ppVowel)     // set form of ppVowel for O(1) membership tests
	ppConsonantMap = ppMakeSet(ppConsonant) // set form of ppConsonant
	ppEndCharMap   = ppMakeSet(ppEndChar)   // set form of ppEndChar
)
|  |  | ||||||
// ppMakeSet converts the characters of v into a lookup set (every byte
// maps to true) so membership can be tested in constant time.
func ppMakeSet(v string) map[uint8]bool {
	result := make(map[uint8]bool, len(v))
	for _, r := range v {
		result[uint8(r)] = true
	}
	return result
}
|  |  | ||||||
|  | func ppRandInt(rng io.Reader, max int) int { | ||||||
|  | 	v, err := rand.Int(rng, big.NewInt(int64(max))) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
|  | 	} | ||||||
|  | 	return int(v.Int64()) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ppRand(rng io.Reader, chars string, entropy *float64) uint8 { | ||||||
|  | 	chr := chars[ppRandInt(rng, len(chars))] | ||||||
|  |  | ||||||
|  | 	*entropy = *entropy + ppLog2Map[len(chars)] | ||||||
|  |  | ||||||
|  | 	return chr | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ppCharType(chr uint8) (bool, bool) { | ||||||
|  | 	_, ok1 := ppVowelMap[chr] | ||||||
|  | 	_, ok2 := ppConsonantMap[chr] | ||||||
|  |  | ||||||
|  | 	return ok1, ok2 | ||||||
|  | } | ||||||
|  |  | ||||||
// ppCharsetRemove returns cs with every character contained in set removed.
// If the removal would leave nothing and allowEmpty is false, the original
// charset is returned unchanged as a fallback.
func ppCharsetRemove(cs string, set map[uint8]bool, allowEmpty bool) string {
	var sb strings.Builder
	for _, chr := range cs {
		if _, ok := set[uint8(chr)]; !ok {
			sb.WriteRune(chr)
		}
	}
	if sb.Len() == 0 && !allowEmpty {
		return cs
	}
	return sb.String()
}
|  |  | ||||||
// ppCharsetFilter returns only the characters of cs that are contained in
// set. If the filter would leave nothing and allowEmpty is false, the
// original charset is returned unchanged as a fallback.
func ppCharsetFilter(cs string, set map[uint8]bool, allowEmpty bool) string {
	var sb strings.Builder
	for _, chr := range cs {
		if _, ok := set[uint8(chr)]; ok {
			sb.WriteRune(chr)
		}
	}
	if sb.Len() == 0 && !allowEmpty {
		return cs
	}
	return sb.String()
}
|  |  | ||||||
// PronouncablePasswordExt generates a pronounceable password of length pwlen
// using rng as the randomness source, and returns it together with the
// accumulated entropy (in bits) of the random choices made.
// The password is built from segments: each segment starts with an uppercase
// letter and continues lowercase, with the next character always drawn from
// ppContinuation and with runs of vowels/consonants capped by
// ppMaxRepeatedVowel / ppMaxRepeatedConsonant.
// A pwlen <= 0 yields ("", 0).
func PronouncablePasswordExt(rng io.Reader, pwlen int) (string, float64) {

	// kinda pseudo markov-chain - with a few extra rules and no weights...

	if pwlen <= 0 {
		return "", 0
	}

	vowelCount := 0
	consoCount := 0
	entropy := float64(0)

	startChar := ppRand(rng, ppStartChar, &entropy)

	result := string(startChar)
	currentChar := startChar

	isVowel, isConsonant := ppCharType(currentChar)
	if isVowel {
		vowelCount = 1
	}
	if isConsonant {
		// start consonants are counted as a full run so the next char is forced towards a vowel
		consoCount = ppMaxRepeatedConsonant
	}

	segmentLen := 1

	// randomized target length of the current segment, in [ppSegmentLenMin, ppSegmentLenMax)
	segmentLenTarget := ppSegmentLenMin + ppRandInt(rng, ppSegmentLenMax-ppSegmentLenMin)

	for len(result) < pwlen {

		// candidate characters that may follow currentChar, pruned by the run-length rules
		charset := ppContinuation[currentChar]
		if vowelCount >= ppMaxRepeatedVowel {
			charset = ppCharsetRemove(charset, ppVowelMap, false)
		}
		if consoCount >= ppMaxRepeatedConsonant {
			charset = ppCharsetRemove(charset, ppConsonantMap, false)
		}

		lastOfSegment := false
		newSegment := false

		if len(result)+1 == pwlen {
			// last of result
			charset = ppCharsetFilter(charset, ppEndCharMap, false)
		} else if segmentLen+1 == segmentLenTarget {
			// last of segment
			charsetNew := ppCharsetFilter(charset, ppEndCharMap, true)
			if charsetNew != "" {
				charset = charsetNew
				lastOfSegment = true
			}
		} else if segmentLen >= segmentLenTarget {
			// (perhaps) start of new segment
			if _, ok := ppEndCharMap[currentChar]; ok {
				charset = ppStartChar
				newSegment = true
			} else {
				// continue segment for one more char to (hopefully) find an end-char
				charsetNew := ppCharsetFilter(charset, ppEndCharMap, true)
				if charsetNew != "" {
					charset = charsetNew
					lastOfSegment = true
				}
			}
		} else {
			// normal continuation
		}

		newChar := ppRand(rng, charset, &entropy)
		if lastOfSegment {
			currentChar = newChar
			segmentLen++
			result += strings.ToLower(string(newChar))
		} else if newSegment {
			// new segments start uppercase and re-roll the segment target length
			currentChar = newChar
			segmentLen = 1
			result += strings.ToUpper(string(newChar))
			segmentLenTarget = ppSegmentLenMin + ppRandInt(rng, ppSegmentLenMax-ppSegmentLenMin)
			vowelCount = 0
			consoCount = 0
		} else {
			currentChar = newChar
			segmentLen++
			result += strings.ToLower(string(newChar))
		}

		// update the vowel/consonant run counters for the character just appended
		isVowel, isConsonant := ppCharType(currentChar)
		if isVowel {
			vowelCount++
			consoCount = 0
		}
		if isConsonant {
			vowelCount = 0
			if newSegment {
				consoCount = ppMaxRepeatedConsonant
			} else {
				consoCount++
			}
		}
	}

	return result, entropy
}
|  |  | ||||||
|  | func PronouncablePassword(len int) string { | ||||||
|  | 	v, _ := PronouncablePasswordExt(rand.Reader, len) | ||||||
|  | 	return v | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func PronouncablePasswordSeeded(seed int64, len int) string { | ||||||
|  |  | ||||||
|  | 	v, _ := PronouncablePasswordExt(mathrand.New(mathrand.NewSource(seed)), len) | ||||||
|  | 	return v | ||||||
|  | } | ||||||
							
								
								
									
										35
									
								
								cryptext/pronouncablePassword_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										35
									
								
								cryptext/pronouncablePassword_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,35 @@ | |||||||
|  | package cryptext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"fmt" | ||||||
|  | 	"math/rand" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func TestPronouncablePasswordExt(t *testing.T) { | ||||||
|  | 	for i := 0; i < 20; i++ { | ||||||
|  | 		pw, entropy := PronouncablePasswordExt(rand.New(rand.NewSource(int64(i))), 16) | ||||||
|  | 		fmt.Printf("[%.2f] => %s\n", entropy, pw) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestPronouncablePasswordSeeded(t *testing.T) { | ||||||
|  | 	for i := 0; i < 20; i++ { | ||||||
|  | 		pw := PronouncablePasswordSeeded(int64(i), 8) | ||||||
|  | 		fmt.Printf("%s\n", pw) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestPronouncablePassword(t *testing.T) { | ||||||
|  | 	for i := 0; i < 20; i++ { | ||||||
|  | 		pw := PronouncablePassword(i + 1) | ||||||
|  | 		fmt.Printf("%s\n", pw) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestPronouncablePasswordWrongLen(t *testing.T) { | ||||||
|  | 	PronouncablePassword(0) | ||||||
|  | 	PronouncablePassword(-1) | ||||||
|  | 	PronouncablePassword(-2) | ||||||
|  | 	PronouncablePassword(-3) | ||||||
|  | } | ||||||
| @@ -4,6 +4,10 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | type RawFilter interface { | ||||||
|  | 	FilterQuery() mongo.Pipeline | ||||||
|  | } | ||||||
|  |  | ||||||
| type Filter interface { | type Filter interface { | ||||||
| 	FilterQuery() mongo.Pipeline | 	FilterQuery() mongo.Pipeline | ||||||
| 	Pagination() (string, SortDirection, string, SortDirection) | 	Pagination() (string, SortDirection, string, SortDirection) | ||||||
|   | |||||||
							
								
								
									
										113
									
								
								dataext/syncMap.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										113
									
								
								dataext/syncMap.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,113 @@ | |||||||
|  | package dataext | ||||||
|  |  | ||||||
|  | import "sync" | ||||||
|  |  | ||||||
// SyncMap is a simple mutex-guarded generic map.
// The zero value is ready to use; the underlying map is allocated lazily
// on the first write. It must not be copied after first use (it contains
// a sync.Mutex), so pass it by pointer.
type SyncMap[TKey comparable, TData any] struct {
	data map[TKey]TData
	lock sync.Mutex
}

// Set stores data under key, overwriting any previous value.
func (s *SyncMap[TKey, TData]) Set(key TKey, data TData) {
	s.lock.Lock()
	defer s.lock.Unlock()

	if s.data == nil {
		s.data = make(map[TKey]TData)
	}

	s.data[key] = data
}

// SetIfNotContains stores data under key only if the key is not yet present.
// It reports whether the value was inserted.
func (s *SyncMap[TKey, TData]) SetIfNotContains(key TKey, data TData) bool {
	s.lock.Lock()
	defer s.lock.Unlock()

	if s.data == nil {
		s.data = make(map[TKey]TData)
	}

	if _, existsInPreState := s.data[key]; existsInPreState {
		return false
	}

	s.data[key] = data

	return true
}

// Get returns the value stored under key and whether it existed.
// For a missing key the zero value of TData is returned.
func (s *SyncMap[TKey, TData]) Get(key TKey) (TData, bool) {
	s.lock.Lock()
	defer s.lock.Unlock()

	// reading from a nil map is safe in Go — no need to allocate here
	if v, ok := s.data[key]; ok {
		return v, true
	}
	return *new(TData), false
}

// Delete removes key from the map and reports whether it was present.
func (s *SyncMap[TKey, TData]) Delete(key TKey) bool {
	s.lock.Lock()
	defer s.lock.Unlock()

	_, ok := s.data[key]

	delete(s.data, key) // delete on a nil map is a no-op

	return ok
}

// Contains reports whether key is present in the map.
func (s *SyncMap[TKey, TData]) Contains(key TKey) bool {
	s.lock.Lock()
	defer s.lock.Unlock()

	_, ok := s.data[key]

	return ok
}

// GetAllKeys returns a snapshot of all keys, in unspecified order.
func (s *SyncMap[TKey, TData]) GetAllKeys() []TKey {
	s.lock.Lock()
	defer s.lock.Unlock()

	r := make([]TKey, 0, len(s.data)) // len(nil map) == 0, ranging a nil map is fine

	for k := range s.data {
		r = append(r, k)
	}

	return r
}

// GetAllValues returns a snapshot of all values, in unspecified order.
func (s *SyncMap[TKey, TData]) GetAllValues() []TData {
	s.lock.Lock()
	defer s.lock.Unlock()

	r := make([]TData, 0, len(s.data))

	for _, v := range s.data {
		r = append(r, v)
	}

	return r
}
| @@ -15,10 +15,17 @@ type StringEnum interface { | |||||||
| type DescriptionEnum interface { | type DescriptionEnum interface { | ||||||
| 	Enum | 	Enum | ||||||
| 	Description() string | 	Description() string | ||||||
|  | 	DescriptionMeta() EnumDescriptionMetaValue | ||||||
| } | } | ||||||
|  |  | ||||||
| type EnumMetaValue struct { | type EnumMetaValue struct { | ||||||
| 	VarName     string  `json:"varName"` | 	VarName     string  `json:"varName"` | ||||||
| 	Value       any     `json:"value"` | 	Value       Enum    `json:"value"` | ||||||
| 	Description *string `json:"description"` | 	Description *string `json:"description"` | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type EnumDescriptionMetaValue struct { | ||||||
|  | 	VarName     string `json:"varName"` | ||||||
|  | 	Value       Enum   `json:"value"` | ||||||
|  | 	Description string `json:"description"` | ||||||
|  | } | ||||||
|   | |||||||
| @@ -68,8 +68,10 @@ func init() { | |||||||
| } | } | ||||||
|  |  | ||||||
| type Builder struct { | type Builder struct { | ||||||
|  | 	wrappedErr      error | ||||||
| 	errorData       *ExErr | 	errorData       *ExErr | ||||||
| 	containsGinData bool | 	containsGinData bool | ||||||
|  | 	noLog           bool | ||||||
| } | } | ||||||
|  |  | ||||||
| func Get(err error) *Builder { | func Get(err error) *Builder { | ||||||
| @@ -88,9 +90,9 @@ func Wrap(err error, msg string) *Builder { | |||||||
| 	if !pkgconfig.RecursiveErrors { | 	if !pkgconfig.RecursiveErrors { | ||||||
| 		v := FromError(err) | 		v := FromError(err) | ||||||
| 		v.Message = msg | 		v.Message = msg | ||||||
| 		return &Builder{errorData: v} | 		return &Builder{wrappedErr: err, errorData: v} | ||||||
| 	} | 	} | ||||||
| 	return &Builder{errorData: wrapExErr(FromError(err), msg, CatWrap, 1)} | 	return &Builder{wrappedErr: err, errorData: wrapExErr(FromError(err), msg, CatWrap, 1)} | ||||||
| } | } | ||||||
|  |  | ||||||
| // ---------------------------------------------------------------------------- | // ---------------------------------------------------------------------------- | ||||||
| @@ -190,6 +192,13 @@ func (b *Builder) System() *Builder { | |||||||
|  |  | ||||||
| // ---------------------------------------------------------------------------- | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
|  | func (b *Builder) NoLog() *Builder { | ||||||
|  | 	b.noLog = true | ||||||
|  | 	return b | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
| func (b *Builder) Id(key string, val fmt.Stringer) *Builder { | func (b *Builder) Id(key string, val fmt.Stringer) *Builder { | ||||||
| 	return b.addMeta(key, MDTID, newIDWrap(val)) | 	return b.addMeta(key, MDTID, newIDWrap(val)) | ||||||
| } | } | ||||||
| @@ -401,16 +410,22 @@ func extractHeader(header map[string][]string) []string { | |||||||
|  |  | ||||||
| // Build creates a new error, ready to pass up the stack | // Build creates a new error, ready to pass up the stack | ||||||
| // If the errors is not SevWarn or SevInfo it gets also logged (in short form, without stacktrace) onto stdout | // If the errors is not SevWarn or SevInfo it gets also logged (in short form, without stacktrace) onto stdout | ||||||
|  | // Can be gloablly configured with ZeroLogErrTraces and ZeroLogAllTraces | ||||||
|  | // Can be locally suppressed with Builder.NoLog() | ||||||
| func (b *Builder) Build() error { | func (b *Builder) Build() error { | ||||||
| 	warnOnPkgConfigNotInitialized() | 	warnOnPkgConfigNotInitialized() | ||||||
|  |  | ||||||
| 	if pkgconfig.ZeroLogErrTraces && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) { | 	if pkgconfig.DisableErrorWrapping && b.wrappedErr != nil { | ||||||
|  | 		return b.wrappedErr | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if pkgconfig.ZeroLogErrTraces && !b.noLog && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) { | ||||||
| 		b.errorData.ShortLog(stackSkipLogger.Error()) | 		b.errorData.ShortLog(stackSkipLogger.Error()) | ||||||
| 	} else if pkgconfig.ZeroLogAllTraces { | 	} else if pkgconfig.ZeroLogAllTraces && !b.noLog { | ||||||
| 		b.errorData.ShortLog(stackSkipLogger.Error()) | 		b.errorData.ShortLog(stackSkipLogger.Error()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	b.CallListener(MethodBuild) | 	b.errorData.CallListener(MethodBuild) | ||||||
|  |  | ||||||
| 	return b.errorData | 	return b.errorData | ||||||
| } | } | ||||||
| @@ -432,7 +447,7 @@ func (b *Builder) Output(ctx context.Context, g *gin.Context) { | |||||||
| 		b.errorData.Log(stackSkipLogger.Warn()) | 		b.errorData.Log(stackSkipLogger.Warn()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	b.CallListener(MethodOutput) | 	b.errorData.CallListener(MethodOutput) | ||||||
| } | } | ||||||
|  |  | ||||||
| // Print prints the error | // Print prints the error | ||||||
| @@ -444,7 +459,7 @@ func (b *Builder) Print() { | |||||||
| 		b.errorData.ShortLog(stackSkipLogger.Warn()) | 		b.errorData.ShortLog(stackSkipLogger.Warn()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	b.CallListener(MethodPrint) | 	b.errorData.CallListener(MethodPrint) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (b *Builder) Format(level LogPrintLevel) string { | func (b *Builder) Format(level LogPrintLevel) string { | ||||||
| @@ -457,7 +472,7 @@ func (b *Builder) Fatal() { | |||||||
| 	b.errorData.Severity = SevFatal | 	b.errorData.Severity = SevFatal | ||||||
| 	b.errorData.Log(stackSkipLogger.WithLevel(zerolog.FatalLevel)) | 	b.errorData.Log(stackSkipLogger.WithLevel(zerolog.FatalLevel)) | ||||||
|  |  | ||||||
| 	b.CallListener(MethodFatal) | 	b.errorData.CallListener(MethodFatal) | ||||||
|  |  | ||||||
| 	os.Exit(1) | 	os.Exit(1) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -46,6 +46,11 @@ var ( | |||||||
| 	TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500)) | 	TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500)) | ||||||
| 	TypeMongoFilter       = NewType("MONGO_FILTER", langext.Ptr(500)) | 	TypeMongoFilter       = NewType("MONGO_FILTER", langext.Ptr(500)) | ||||||
| 	TypeMongoReflection   = NewType("MONGO_REFLECTION", langext.Ptr(500)) | 	TypeMongoReflection   = NewType("MONGO_REFLECTION", langext.Ptr(500)) | ||||||
|  | 	TypeMongoInvalidOpt   = NewType("MONGO_INVALIDOPT", langext.Ptr(500)) | ||||||
|  |  | ||||||
|  | 	TypeSQLQuery  = NewType("SQL_QUERY", langext.Ptr(500)) | ||||||
|  | 	TypeSQLBuild  = NewType("SQL_BUILD", langext.Ptr(500)) | ||||||
|  | 	TypeSQLDecode = NewType("SQL_DECODE", langext.Ptr(500)) | ||||||
|  |  | ||||||
| 	TypeWrap = NewType("Wrap", nil) | 	TypeWrap = NewType("Wrap", nil) | ||||||
|  |  | ||||||
| @@ -58,21 +63,27 @@ var ( | |||||||
| 	TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400)) | 	TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400)) | ||||||
| 	TypeInvalidCSID     = NewType("INVALID_CSID", langext.Ptr(400)) | 	TypeInvalidCSID     = NewType("INVALID_CSID", langext.Ptr(400)) | ||||||
|  |  | ||||||
|  | 	TypeGoogleStatuscode = NewType("GOOGLE_STATUSCODE", langext.Ptr(400)) | ||||||
|  | 	TypeGoogleResponse   = NewType("GOOGLE_RESPONSE", langext.Ptr(400)) | ||||||
|  |  | ||||||
| 	TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401)) | 	TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401)) | ||||||
| 	TypeAuthFailed   = NewType("AUTH_FAILED", langext.Ptr(401)) | 	TypeAuthFailed   = NewType("AUTH_FAILED", langext.Ptr(401)) | ||||||
|  |  | ||||||
| 	// other values come the used package | 	// other values come from the downstream application that uses goext | ||||||
| ) | ) | ||||||
|  |  | ||||||
| var registeredTypes = dataext.SyncSet[string]{} | var registeredTypes = dataext.SyncMap[string, ErrorType]{} | ||||||
|  |  | ||||||
| func NewType(key string, defStatusCode *int) ErrorType { | func NewType(key string, defStatusCode *int) ErrorType { | ||||||
| 	insertOkay := registeredTypes.Add(key) | 	et := ErrorType{key, defStatusCode} | ||||||
| 	if !insertOkay { |  | ||||||
| 		panic("Cannot register same ErrType ('" + key + "') more than once") | 	registeredTypes.Set(key, et) | ||||||
|  |  | ||||||
|  | 	return et | ||||||
| } | } | ||||||
|  |  | ||||||
| 	return ErrorType{key, defStatusCode} | func ListRegisteredTypes() []ErrorType { | ||||||
|  | 	return registeredTypes.GetAllValues() | ||||||
| } | } | ||||||
|  |  | ||||||
| type LogPrintLevel string | type LogPrintLevel string | ||||||
|   | |||||||
| @@ -13,6 +13,7 @@ type ErrorPackageConfig struct { | |||||||
| 	IncludeMetaInGinOutput bool                                             // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output() | 	IncludeMetaInGinOutput bool                                             // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output() | ||||||
| 	ExtendGinOutput        func(err *ExErr, json map[string]any)            // (Optionally) extend the gin output with more fields | 	ExtendGinOutput        func(err *ExErr, json map[string]any)            // (Optionally) extend the gin output with more fields | ||||||
| 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) // (Optionally) extend the gin `__data` output with more fields | 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) // (Optionally) extend the gin `__data` output with more fields | ||||||
|  | 	DisableErrorWrapping   bool                                             // Disables the exerr.Wrap()...Build() function - will always return the original error | ||||||
| } | } | ||||||
|  |  | ||||||
| type ErrorPackageConfigInit struct { | type ErrorPackageConfigInit struct { | ||||||
| @@ -23,6 +24,7 @@ type ErrorPackageConfigInit struct { | |||||||
| 	IncludeMetaInGinOutput *bool | 	IncludeMetaInGinOutput *bool | ||||||
| 	ExtendGinOutput        func(err *ExErr, json map[string]any) | 	ExtendGinOutput        func(err *ExErr, json map[string]any) | ||||||
| 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) | 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) | ||||||
|  | 	DisableErrorWrapping   *bool | ||||||
| } | } | ||||||
|  |  | ||||||
| var initialized = false | var initialized = false | ||||||
| @@ -35,6 +37,7 @@ var pkgconfig = ErrorPackageConfig{ | |||||||
| 	IncludeMetaInGinOutput: true, | 	IncludeMetaInGinOutput: true, | ||||||
| 	ExtendGinOutput:        func(err *ExErr, json map[string]any) {}, | 	ExtendGinOutput:        func(err *ExErr, json map[string]any) {}, | ||||||
| 	ExtendGinDataOutput:    func(err *ExErr, depth int, json map[string]any) {}, | 	ExtendGinDataOutput:    func(err *ExErr, depth int, json map[string]any) {}, | ||||||
|  | 	DisableErrorWrapping:   false, | ||||||
| } | } | ||||||
|  |  | ||||||
| // Init initializes the exerr packages | // Init initializes the exerr packages | ||||||
| @@ -63,11 +66,16 @@ func Init(cfg ErrorPackageConfigInit) { | |||||||
| 		IncludeMetaInGinOutput: langext.Coalesce(cfg.IncludeMetaInGinOutput, pkgconfig.IncludeMetaInGinOutput), | 		IncludeMetaInGinOutput: langext.Coalesce(cfg.IncludeMetaInGinOutput, pkgconfig.IncludeMetaInGinOutput), | ||||||
| 		ExtendGinOutput:        ego, | 		ExtendGinOutput:        ego, | ||||||
| 		ExtendGinDataOutput:    egdo, | 		ExtendGinDataOutput:    egdo, | ||||||
|  | 		DisableErrorWrapping:   langext.Coalesce(cfg.DisableErrorWrapping, pkgconfig.DisableErrorWrapping), | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	initialized = true | 	initialized = true | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func Initialized() bool { | ||||||
|  | 	return initialized | ||||||
|  | } | ||||||
|  |  | ||||||
| func warnOnPkgConfigNotInitialized() { | func warnOnPkgConfigNotInitialized() { | ||||||
| 	if !initialized { | 	if !initialized { | ||||||
| 		fmt.Printf("\n") | 		fmt.Printf("\n") | ||||||
|   | |||||||
| @@ -169,14 +169,32 @@ func (ee *ExErr) ShortLog(evt *zerolog.Event) { | |||||||
|  |  | ||||||
| // RecursiveMessage returns the message to show | // RecursiveMessage returns the message to show | ||||||
| // = first error (top-down) that is not wrapping/foreign/empty | // = first error (top-down) that is not wrapping/foreign/empty | ||||||
|  | // = lowest level error (that is not empty) | ||||||
|  | // = fallback to self.message | ||||||
| func (ee *ExErr) RecursiveMessage() string { | func (ee *ExErr) RecursiveMessage() string { | ||||||
|  |  | ||||||
|  | 	// ==== [1] ==== first error (top-down) that is not wrapping/foreign/empty | ||||||
|  |  | ||||||
| 	for curr := ee; curr != nil; curr = curr.OriginalError { | 	for curr := ee; curr != nil; curr = curr.OriginalError { | ||||||
| 		if curr.Message != "" && curr.Category != CatWrap && curr.Category != CatForeign { | 		if curr.Message != "" && curr.Category != CatWrap && curr.Category != CatForeign { | ||||||
| 			return curr.Message | 			return curr.Message | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	// fallback to self | 	// ==== [2] ==== lowest level error (that is not empty) | ||||||
|  |  | ||||||
|  | 	deepestMsg := "" | ||||||
|  | 	for curr := ee; curr != nil; curr = curr.OriginalError { | ||||||
|  | 		if curr.Message != "" { | ||||||
|  | 			deepestMsg = curr.Message | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	if deepestMsg != "" { | ||||||
|  | 		return deepestMsg | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	// ==== [3] ==== fallback to self.message | ||||||
|  |  | ||||||
| 	return ee.Message | 	return ee.Message | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -240,6 +258,73 @@ func (ee *ExErr) Depth() int { | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // GetMeta returns the meta value with the specified key | ||||||
|  | // this method recurses through all wrapped errors and returns the first matching meta value | ||||||
|  | func (ee *ExErr) GetMeta(key string) (any, bool) { | ||||||
|  | 	for curr := ee; curr != nil; curr = curr.OriginalError { | ||||||
|  | 		if v, ok := curr.Meta[key]; ok { | ||||||
|  | 			return v.Value, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil, false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // GetMetaString functions the same as GetMeta, but returns false if the type does not match | ||||||
|  | func (ee *ExErr) GetMetaString(key string) (string, bool) { | ||||||
|  | 	if v1, ok := ee.GetMeta(key); ok { | ||||||
|  | 		if v2, ok := v1.(string); ok { | ||||||
|  | 			return v2, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return "", false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (ee *ExErr) GetMetaBool(key string) (bool, bool) { | ||||||
|  | 	if v1, ok := ee.GetMeta(key); ok { | ||||||
|  | 		if v2, ok := v1.(bool); ok { | ||||||
|  | 			return v2, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return false, false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (ee *ExErr) GetMetaInt(key string) (int, bool) { | ||||||
|  | 	if v1, ok := ee.GetMeta(key); ok { | ||||||
|  | 		if v2, ok := v1.(int); ok { | ||||||
|  | 			return v2, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return 0, false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (ee *ExErr) GetMetaFloat32(key string) (float32, bool) { | ||||||
|  | 	if v1, ok := ee.GetMeta(key); ok { | ||||||
|  | 		if v2, ok := v1.(float32); ok { | ||||||
|  | 			return v2, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return 0, false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (ee *ExErr) GetMetaFloat64(key string) (float64, bool) { | ||||||
|  | 	if v1, ok := ee.GetMeta(key); ok { | ||||||
|  | 		if v2, ok := v1.(float64); ok { | ||||||
|  | 			return v2, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return 0, false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (ee *ExErr) GetMetaTime(key string) (time.Time, bool) { | ||||||
|  | 	if v1, ok := ee.GetMeta(key); ok { | ||||||
|  | 		if v2, ok := v1.(time.Time); ok { | ||||||
|  | 			return v2, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return time.Time{}, false | ||||||
|  | } | ||||||
|  |  | ||||||
| // contains test if the supplied error is contained in this error (anywhere in the chain) | // contains test if the supplied error is contained in this error (anywhere in the chain) | ||||||
| func (ee *ExErr) contains(original *ExErr) (*ExErr, bool) { | func (ee *ExErr) contains(original *ExErr) (*ExErr, bool) { | ||||||
| 	if original == nil { | 	if original == nil { | ||||||
|   | |||||||
							
								
								
									
										13
									
								
								exerr/gin.go
									
									
									
									
									
								
							
							
						
						
									
										13
									
								
								exerr/gin.go
									
									
									
									
									
								
							| @@ -57,6 +57,19 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la | |||||||
| 	return ginJson | 	return ginJson | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (ee *ExErr) ToDefaultAPIJson() (string, error) { | ||||||
|  |  | ||||||
|  | 	gjr := json.GoJsonRender{Data: ee.ToAPIJson(true, pkgconfig.ExtendedGinOutput, pkgconfig.IncludeMetaInGinOutput), NilSafeSlices: true, NilSafeMaps: true} | ||||||
|  |  | ||||||
|  | 	r, err := gjr.RenderString() | ||||||
|  |  | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
| // ToAPIJson converts the ExError to a json object | // ToAPIJson converts the ExError to a json object | ||||||
| // (the same object as used in the Output(gin) method) | // (the same object as used in the Output(gin) method) | ||||||
| // | // | ||||||
|   | |||||||
| @@ -25,13 +25,11 @@ func RegisterListener(l Listener) { | |||||||
| 	listener = append(listener, l) | 	listener = append(listener, l) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (b *Builder) CallListener(m Method) { | func (ee *ExErr) CallListener(m Method) { | ||||||
| 	valErr := b.errorData |  | ||||||
|  |  | ||||||
| 	listenerLock.Lock() | 	listenerLock.Lock() | ||||||
| 	defer listenerLock.Unlock() | 	defer listenerLock.Unlock() | ||||||
|  |  | ||||||
| 	for _, v := range listener { | 	for _, v := range listener { | ||||||
| 		v(m, valErr) | 		v(m, ee) | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|   | |||||||
| @@ -6,20 +6,25 @@ import ( | |||||||
| 	"github.com/rs/zerolog/log" | 	"github.com/rs/zerolog/log" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/mathext" | 	"gogs.mikescher.com/BlackForestBytes/goext/mathext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
| 	"net" | 	"net" | ||||||
| 	"net/http" | 	"net/http" | ||||||
|  | 	"net/http/httptest" | ||||||
|  | 	"regexp" | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type GinWrapper struct { | type GinWrapper struct { | ||||||
| 	engine          *gin.Engine | 	engine          *gin.Engine | ||||||
| 	SuppressGinLogs bool | 	suppressGinLogs bool | ||||||
|  |  | ||||||
| 	allowCors             bool | 	allowCors             bool | ||||||
| 	ginDebug              bool | 	ginDebug              bool | ||||||
| 	bufferBody            bool | 	bufferBody            bool | ||||||
| 	requestTimeout        time.Duration | 	requestTimeout        time.Duration | ||||||
|  | 	listenerBeforeRequest []func(g *gin.Context) | ||||||
|  | 	listenerAfterRequest  []func(g *gin.Context, resp HTTPResponse) | ||||||
|  |  | ||||||
| 	routeSpecs []ginRouteSpec | 	routeSpecs []ginRouteSpec | ||||||
| } | } | ||||||
| @@ -31,40 +36,46 @@ type ginRouteSpec struct { | |||||||
| 	Handler     string | 	Handler     string | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type Options struct { | ||||||
|  | 	AllowCors             *bool                                     // Add cors handler to allow all CORS requests on the default http methods | ||||||
|  | 	GinDebug              *bool                                     // Set gin.debug to true (adds more logs) | ||||||
|  | 	BufferBody            *bool                                     // Buffers the input body stream, this way the ginext error handler can later include the whole request body | ||||||
|  | 	Timeout               *time.Duration                            // The default handler timeout | ||||||
|  | 	ListenerBeforeRequest []func(g *gin.Context)                    // Register listener that are called before the handler method | ||||||
|  | 	ListenerAfterRequest  []func(g *gin.Context, resp HTTPResponse) // Register listener that are called after the handler method | ||||||
|  | } | ||||||
|  |  | ||||||
| // NewEngine creates a new (wrapped) ginEngine | // NewEngine creates a new (wrapped) ginEngine | ||||||
| // Parameters are: | func NewEngine(opt Options) *GinWrapper { | ||||||
| // - [allowCors]    Add cors handler to allow all CORS requests on the default http methods |  | ||||||
| // - [ginDebug]     Set gin.debug to true (adds more logs) |  | ||||||
| // - [bufferBody]   Buffers the input body stream, this way the ginext error handler can later include the whole request body |  | ||||||
| // - [timeout]      The default handler timeout |  | ||||||
| func NewEngine(allowCors bool, ginDebug bool, bufferBody bool, timeout time.Duration) *GinWrapper { |  | ||||||
| 	engine := gin.New() | 	engine := gin.New() | ||||||
|  |  | ||||||
| 	wrapper := &GinWrapper{ | 	wrapper := &GinWrapper{ | ||||||
| 		engine:                engine, | 		engine:                engine, | ||||||
| 		SuppressGinLogs: false, | 		suppressGinLogs:       false, | ||||||
| 		allowCors:       allowCors, | 		allowCors:             langext.Coalesce(opt.AllowCors, false), | ||||||
| 		ginDebug:        ginDebug, | 		ginDebug:              langext.Coalesce(opt.GinDebug, true), | ||||||
| 		bufferBody:      bufferBody, | 		bufferBody:            langext.Coalesce(opt.BufferBody, false), | ||||||
| 		requestTimeout:  timeout, | 		requestTimeout:        langext.Coalesce(opt.Timeout, 24*time.Hour), | ||||||
|  | 		listenerBeforeRequest: opt.ListenerBeforeRequest, | ||||||
|  | 		listenerAfterRequest:  opt.ListenerAfterRequest, | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	engine.RedirectFixedPath = false | 	engine.RedirectFixedPath = false | ||||||
| 	engine.RedirectTrailingSlash = false | 	engine.RedirectTrailingSlash = false | ||||||
|  |  | ||||||
| 	if allowCors { | 	if wrapper.allowCors { | ||||||
| 		engine.Use(CorsMiddleware()) | 		engine.Use(CorsMiddleware()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	// do not debug-print routes | 	// do not debug-print routes | ||||||
| 	gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {} | 	gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {} | ||||||
|  |  | ||||||
| 	if !ginDebug { | 	if !wrapper.ginDebug { | ||||||
| 		gin.SetMode(gin.ReleaseMode) | 		gin.SetMode(gin.ReleaseMode) | ||||||
|  |  | ||||||
| 		ginlogger := gin.Logger() | 		ginlogger := gin.Logger() | ||||||
| 		engine.Use(func(context *gin.Context) { | 		engine.Use(func(context *gin.Context) { | ||||||
| 			if !wrapper.SuppressGinLogs { | 			if !wrapper.suppressGinLogs { | ||||||
| 				ginlogger(context) | 				ginlogger(context) | ||||||
| 			} | 			} | ||||||
| 		}) | 		}) | ||||||
| @@ -126,8 +137,8 @@ func (w *GinWrapper) DebugPrintRoutes() { | |||||||
| 		line := [4]string{ | 		line := [4]string{ | ||||||
| 			spec.Method, | 			spec.Method, | ||||||
| 			spec.URL, | 			spec.URL, | ||||||
| 			strings.Join(spec.Middlewares, " -> "), | 			strings.Join(langext.ArrMap(spec.Middlewares, w.cleanMiddlewareName), " -> "), | ||||||
| 			spec.Handler, | 			w.cleanMiddlewareName(spec.Handler), | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		lines = append(lines, line) | 		lines = append(lines, line) | ||||||
| @@ -138,12 +149,47 @@ func (w *GinWrapper) DebugPrintRoutes() { | |||||||
| 		pad[3] = mathext.Max(pad[3], len(line[3])) | 		pad[3] = mathext.Max(pad[3], len(line[3])) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	fmt.Printf("Gin-Routes:\n") | ||||||
|  | 	fmt.Printf("{\n") | ||||||
| 	for _, line := range lines { | 	for _, line := range lines { | ||||||
|  |  | ||||||
| 		fmt.Printf("Gin-Route: %s  %s  -->  %s  -->  %s\n", | 		fmt.Printf(" %s  %s  -->  %s  -->  %s\n", | ||||||
| 			langext.StrPadRight("["+line[0]+"]", " ", pad[0]+2), | 			langext.StrPadRight("["+line[0]+"]", " ", pad[0]+2), | ||||||
| 			langext.StrPadRight(line[1], " ", pad[1]), | 			langext.StrPadRight(line[1], " ", pad[1]), | ||||||
| 			langext.StrPadRight(line[2], " ", pad[2]), | 			langext.StrPadRight(line[2], " ", pad[2]), | ||||||
| 			langext.StrPadRight(line[3], " ", pad[3])) | 			langext.StrPadRight(line[3], " ", pad[3])) | ||||||
| 	} | 	} | ||||||
|  | 	fmt.Printf("}\n") | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (w *GinWrapper) cleanMiddlewareName(fname string) string { | ||||||
|  |  | ||||||
|  | 	funcSuffix := rext.W(regexp.MustCompile(`\.func[0-9]+(?:\.[0-9]+)*$`)) | ||||||
|  | 	if match, ok := funcSuffix.MatchFirst(fname); ok { | ||||||
|  | 		fname = fname[:len(fname)-match.FullMatch().Length()] | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if strings.HasSuffix(fname, ".(*GinRoutesWrapper).WithJSONFilter") { | ||||||
|  | 		fname = "[JSONFilter]" | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if fname == "ginext.BodyBuffer" { | ||||||
|  | 		fname = "[BodyBuffer]" | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	skipPrefixes := []string{"api.(*Handler).", "api.", "ginext.", "handler.", "admin-app.", "employee-app.", "employer-app."} | ||||||
|  | 	for _, pfx := range skipPrefixes { | ||||||
|  | 		if strings.HasPrefix(fname, pfx) { | ||||||
|  | 			fname = fname[len(pfx):] | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return fname | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // ServeHTTP only used for unit tests | ||||||
|  | func (w *GinWrapper) ServeHTTP(req *http.Request) *httptest.ResponseRecorder { | ||||||
|  | 	respRec := httptest.NewRecorder() | ||||||
|  | 	w.engine.ServeHTTP(respRec, req) | ||||||
|  | 	return respRec | ||||||
| } | } | ||||||
|   | |||||||
| @@ -14,7 +14,17 @@ func Wrap(w *GinWrapper, fn WHandlerFunc) gin.HandlerFunc { | |||||||
|  |  | ||||||
| 		reqctx := g.Request.Context() | 		reqctx := g.Request.Context() | ||||||
|  |  | ||||||
| 		wrap, stackTrace, panicObj := callPanicSafe(fn, PreContext{wrapper: w, ginCtx: g}) | 		pctx := PreContext{ | ||||||
|  | 			wrapper:        w, | ||||||
|  | 			ginCtx:         g, | ||||||
|  | 			persistantData: &preContextData{}, | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		for _, lstr := range w.listenerBeforeRequest { | ||||||
|  | 			lstr(g) | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		wrap, stackTrace, panicObj := callPanicSafe(fn, pctx) | ||||||
| 		if panicObj != nil { | 		if panicObj != nil { | ||||||
|  |  | ||||||
| 			fmt.Printf("\n======== ======== STACKTRACE ======== ========\n%s\n======== ======== ======== ========\n\n", stackTrace) | 			fmt.Printf("\n======== ======== STACKTRACE ======== ========\n%s\n======== ======== ======== ========\n\n", stackTrace) | ||||||
| @@ -32,6 +42,17 @@ func Wrap(w *GinWrapper, fn WHandlerFunc) gin.HandlerFunc { | |||||||
| 			panic("Writing in WrapperFunc is not supported") | 			panic("Writing in WrapperFunc is not supported") | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|  | 		if pctx.persistantData.sessionObj != nil { | ||||||
|  | 			err := pctx.persistantData.sessionObj.Finish(reqctx, wrap) | ||||||
|  | 			if err != nil { | ||||||
|  | 				wrap = Error(exerr.Wrap(err, "Failed to finish session").Any("originalResponse", wrap).Build()) | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		for _, lstr := range w.listenerAfterRequest { | ||||||
|  | 			lstr(g, wrap) | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		if reqctx.Err() == nil { | 		if reqctx.Err() == nil { | ||||||
| 			wrap.Write(g) | 			wrap.Write(g) | ||||||
| 		} | 		} | ||||||
|   | |||||||
| @@ -1,12 +1,15 @@ | |||||||
| package ginext | package ginext | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"bytes" | ||||||
| 	"context" | 	"context" | ||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
| 	"github.com/gin-gonic/gin/binding" | 	"github.com/gin-gonic/gin/binding" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/dataext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"io" | ||||||
| 	"runtime/debug" | 	"runtime/debug" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -17,9 +20,15 @@ type PreContext struct { | |||||||
| 	uri            any | 	uri            any | ||||||
| 	query          any | 	query          any | ||||||
| 	body           any | 	body           any | ||||||
|  | 	rawbody        *[]byte | ||||||
| 	form           any | 	form           any | ||||||
| 	header         any | 	header         any | ||||||
| 	timeout        *time.Duration | 	timeout        *time.Duration | ||||||
|  | 	persistantData *preContextData // must be a ptr, so that we can get the values back in out Wrap func | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type preContextData struct { | ||||||
|  | 	sessionObj SessionObject | ||||||
| } | } | ||||||
|  |  | ||||||
| func (pctx *PreContext) URI(uri any) *PreContext { | func (pctx *PreContext) URI(uri any) *PreContext { | ||||||
| @@ -37,6 +46,11 @@ func (pctx *PreContext) Body(body any) *PreContext { | |||||||
| 	return pctx | 	return pctx | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (pctx *PreContext) RawBody(rawbody *[]byte) *PreContext { | ||||||
|  | 	pctx.rawbody = rawbody | ||||||
|  | 	return pctx | ||||||
|  | } | ||||||
|  |  | ||||||
| func (pctx *PreContext) Form(form any) *PreContext { | func (pctx *PreContext) Form(form any) *PreContext { | ||||||
| 	pctx.form = form | 	pctx.form = form | ||||||
| 	return pctx | 	return pctx | ||||||
| @@ -52,6 +66,11 @@ func (pctx *PreContext) WithTimeout(to time.Duration) *PreContext { | |||||||
| 	return pctx | 	return pctx | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (pctx *PreContext) WithSession(sessionObj SessionObject) *PreContext { | ||||||
|  | 	pctx.persistantData.sessionObj = sessionObj | ||||||
|  | 	return pctx | ||||||
|  | } | ||||||
|  |  | ||||||
| func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | ||||||
| 	if pctx.uri != nil { | 	if pctx.uri != nil { | ||||||
| 		if err := pctx.ginCtx.ShouldBindUri(pctx.uri); err != nil { | 		if err := pctx.ginCtx.ShouldBindUri(pctx.uri); err != nil { | ||||||
| @@ -90,6 +109,23 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | |||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	if pctx.rawbody != nil { | ||||||
|  | 		if brc, ok := pctx.ginCtx.Request.Body.(dataext.BufferedReadCloser); ok { | ||||||
|  | 			v, err := brc.BufferedAll() | ||||||
|  | 			if err != nil { | ||||||
|  | 				return nil, nil, langext.Ptr(Error(err)) | ||||||
|  | 			} | ||||||
|  | 			*pctx.rawbody = v | ||||||
|  | 		} else { | ||||||
|  | 			buf := &bytes.Buffer{} | ||||||
|  | 			_, err := io.Copy(buf, pctx.ginCtx.Request.Body) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return nil, nil, langext.Ptr(Error(err)) | ||||||
|  | 			} | ||||||
|  | 			*pctx.rawbody = buf.Bytes() | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	if pctx.form != nil { | 	if pctx.form != nil { | ||||||
| 		if pctx.ginCtx.ContentType() == "multipart/form-data" { | 		if pctx.ginCtx.ContentType() == "multipart/form-data" { | ||||||
| 			if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil { | 			if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil { | ||||||
| @@ -126,6 +162,15 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	ictx, cancel := context.WithTimeout(context.Background(), langext.Coalesce(pctx.timeout, pctx.wrapper.requestTimeout)) | 	ictx, cancel := context.WithTimeout(context.Background(), langext.Coalesce(pctx.timeout, pctx.wrapper.requestTimeout)) | ||||||
|  |  | ||||||
|  | 	if pctx.persistantData.sessionObj != nil { | ||||||
|  | 		err := pctx.persistantData.sessionObj.Init(pctx.ginCtx, ictx) | ||||||
|  | 		if err != nil { | ||||||
|  | 			cancel() | ||||||
|  | 			return nil, nil, langext.Ptr(Error(exerr.Wrap(err, "Failed to init session").Build())) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	actx := CreateAppContext(pctx.ginCtx, ictx, cancel) | 	actx := CreateAppContext(pctx.ginCtx, ictx, cancel) | ||||||
|  |  | ||||||
| 	return actx, pctx.ginCtx, nil | 	return actx, pctx.ginCtx, nil | ||||||
|   | |||||||
| @@ -5,8 +5,20 @@ import ( | |||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	json "gogs.mikescher.com/BlackForestBytes/goext/gojson" | 	json "gogs.mikescher.com/BlackForestBytes/goext/gojson" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"os" | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | type cookieval struct { | ||||||
|  | 	name     string | ||||||
|  | 	value    string | ||||||
|  | 	maxAge   int | ||||||
|  | 	path     string | ||||||
|  | 	domain   string | ||||||
|  | 	secure   bool | ||||||
|  | 	httpOnly bool | ||||||
|  | } | ||||||
|  |  | ||||||
| type headerval struct { | type headerval struct { | ||||||
| 	Key string | 	Key string | ||||||
| 	Val string | 	Val string | ||||||
| @@ -15,23 +27,42 @@ type headerval struct { | |||||||
| type HTTPResponse interface { | type HTTPResponse interface { | ||||||
| 	Write(g *gin.Context) | 	Write(g *gin.Context) | ||||||
| 	WithHeader(k string, v string) HTTPResponse | 	WithHeader(k string, v string) HTTPResponse | ||||||
|  | 	WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse | ||||||
|  | 	IsSuccess() bool | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type InspectableHTTPResponse interface { | ||||||
|  | 	HTTPResponse | ||||||
|  |  | ||||||
|  | 	Statuscode() int | ||||||
|  | 	BodyString(g *gin.Context) *string | ||||||
|  | 	ContentType() string | ||||||
|  | 	Headers() []string | ||||||
| } | } | ||||||
|  |  | ||||||
| type jsonHTTPResponse struct { | type jsonHTTPResponse struct { | ||||||
| 	statusCode int | 	statusCode int | ||||||
| 	data       any | 	data       any | ||||||
| 	headers    []headerval | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) jsonRenderer(g *gin.Context) json.GoJsonRender { | ||||||
|  | 	var f *string | ||||||
|  | 	if jsonfilter := g.GetString("goext.jsonfilter"); jsonfilter != "" { | ||||||
|  | 		f = &jsonfilter | ||||||
|  | 	} | ||||||
|  | 	return json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true, Filter: f} | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j jsonHTTPResponse) Write(g *gin.Context) { | func (j jsonHTTPResponse) Write(g *gin.Context) { | ||||||
| 	for _, v := range j.headers { | 	for _, v := range j.headers { | ||||||
| 		g.Header(v.Key, v.Val) | 		g.Header(v.Key, v.Val) | ||||||
| 	} | 	} | ||||||
| 	var f *string | 	for _, v := range j.cookies { | ||||||
| 	if jsonfilter := g.GetString("goext.jsonfilter"); jsonfilter != "" { | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
| 		f = &jsonfilter |  | ||||||
| 	} | 	} | ||||||
| 	g.Render(j.statusCode, json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true, Filter: f}) | 	g.Render(j.statusCode, j.jsonRenderer(g)) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse { | func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||||
| @@ -39,15 +70,48 @@ func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) IsSuccess() bool { | ||||||
|  | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) Statuscode() int { | ||||||
|  | 	return j.statusCode | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) BodyString(g *gin.Context) *string { | ||||||
|  | 	if str, err := j.jsonRenderer(g).RenderString(); err == nil { | ||||||
|  | 		return &str | ||||||
|  | 	} else { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) ContentType() string { | ||||||
|  | 	return "application/json" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
| type emptyHTTPResponse struct { | type emptyHTTPResponse struct { | ||||||
| 	statusCode int | 	statusCode int | ||||||
| 	headers    []headerval | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j emptyHTTPResponse) Write(g *gin.Context) { | func (j emptyHTTPResponse) Write(g *gin.Context) { | ||||||
| 	for _, v := range j.headers { | 	for _, v := range j.headers { | ||||||
| 		g.Header(v.Key, v.Val) | 		g.Header(v.Key, v.Val) | ||||||
| 	} | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	g.Status(j.statusCode) | 	g.Status(j.statusCode) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -56,16 +120,45 @@ func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) IsSuccess() bool { | ||||||
|  | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) Statuscode() int { | ||||||
|  | 	return j.statusCode | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) ContentType() string { | ||||||
|  | 	return "" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
| type textHTTPResponse struct { | type textHTTPResponse struct { | ||||||
| 	statusCode int | 	statusCode int | ||||||
| 	data       string | 	data       string | ||||||
| 	headers    []headerval | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j textHTTPResponse) Write(g *gin.Context) { | func (j textHTTPResponse) Write(g *gin.Context) { | ||||||
| 	for _, v := range j.headers { | 	for _, v := range j.headers { | ||||||
| 		g.Header(v.Key, v.Val) | 		g.Header(v.Key, v.Val) | ||||||
| 	} | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	g.String(j.statusCode, "%s", j.data) | 	g.String(j.statusCode, "%s", j.data) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -74,17 +167,46 @@ func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) IsSuccess() bool { | ||||||
|  | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) Statuscode() int { | ||||||
|  | 	return j.statusCode | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	return langext.Ptr(j.data) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) ContentType() string { | ||||||
|  | 	return "text/plain" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
| type dataHTTPResponse struct { | type dataHTTPResponse struct { | ||||||
| 	statusCode  int | 	statusCode  int | ||||||
| 	data        []byte | 	data        []byte | ||||||
| 	contentType string | 	contentType string | ||||||
| 	headers     []headerval | 	headers     []headerval | ||||||
|  | 	cookies     []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j dataHTTPResponse) Write(g *gin.Context) { | func (j dataHTTPResponse) Write(g *gin.Context) { | ||||||
| 	for _, v := range j.headers { | 	for _, v := range j.headers { | ||||||
| 		g.Header(v.Key, v.Val) | 		g.Header(v.Key, v.Val) | ||||||
| 	} | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	g.Data(j.statusCode, j.contentType, j.data) | 	g.Data(j.statusCode, j.contentType, j.data) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -93,11 +215,37 @@ func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) IsSuccess() bool { | ||||||
|  | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) Statuscode() int { | ||||||
|  | 	return j.statusCode | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	return langext.Ptr(string(j.data)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) ContentType() string { | ||||||
|  | 	return j.contentType | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
| type fileHTTPResponse struct { | type fileHTTPResponse struct { | ||||||
| 	mimetype string | 	mimetype string | ||||||
| 	filepath string | 	filepath string | ||||||
| 	filename *string | 	filename *string | ||||||
| 	headers  []headerval | 	headers  []headerval | ||||||
|  | 	cookies  []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j fileHTTPResponse) Write(g *gin.Context) { | func (j fileHTTPResponse) Write(g *gin.Context) { | ||||||
| @@ -109,6 +257,9 @@ func (j fileHTTPResponse) Write(g *gin.Context) { | |||||||
| 	for _, v := range j.headers { | 	for _, v := range j.headers { | ||||||
| 		g.Header(v.Key, v.Val) | 		g.Header(v.Key, v.Val) | ||||||
| 	} | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	g.File(j.filepath) | 	g.File(j.filepath) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -117,23 +268,55 @@ func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) IsSuccess() bool { | ||||||
|  | 	return true | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) Statuscode() int { | ||||||
|  | 	return 200 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	data, err := os.ReadFile(j.filepath) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	return langext.Ptr(string(data)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) ContentType() string { | ||||||
|  | 	return j.mimetype | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
| type downloadDataHTTPResponse struct { | type downloadDataHTTPResponse struct { | ||||||
| 	statusCode int | 	statusCode int | ||||||
| 	mimetype   string | 	mimetype   string | ||||||
| 	data       []byte | 	data       []byte | ||||||
| 	filename   *string | 	filename   *string | ||||||
| 	headers    []headerval | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j downloadDataHTTPResponse) Write(g *gin.Context) { | func (j downloadDataHTTPResponse) Write(g *gin.Context) { | ||||||
| 	g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later... | 	g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later... | ||||||
| 	if j.filename != nil { | 	if j.filename != nil { | ||||||
| 		g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename)) | 		g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename)) | ||||||
|  |  | ||||||
| 	} | 	} | ||||||
| 	for _, v := range j.headers { | 	for _, v := range j.headers { | ||||||
| 		g.Header(v.Key, v.Val) | 		g.Header(v.Key, v.Val) | ||||||
| 	} | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	g.Data(j.statusCode, j.mimetype, j.data) | 	g.Data(j.statusCode, j.mimetype, j.data) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -142,13 +325,45 @@ func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) IsSuccess() bool { | ||||||
|  | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) Statuscode() int { | ||||||
|  | 	return j.statusCode | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	return langext.Ptr(string(j.data)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) ContentType() string { | ||||||
|  | 	return j.mimetype | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
| type redirectHTTPResponse struct { | type redirectHTTPResponse struct { | ||||||
| 	statusCode int | 	statusCode int | ||||||
| 	url        string | 	url        string | ||||||
| 	headers    []headerval | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j redirectHTTPResponse) Write(g *gin.Context) { | func (j redirectHTTPResponse) Write(g *gin.Context) { | ||||||
|  | 	for _, v := range j.headers { | ||||||
|  | 		g.Header(v.Key, v.Val) | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	g.Redirect(j.statusCode, j.url) | 	g.Redirect(j.statusCode, j.url) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -157,13 +372,47 @@ func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) IsSuccess() bool { | ||||||
|  | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) Statuscode() int { | ||||||
|  | 	return j.statusCode | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) ContentType() string { | ||||||
|  | 	return "" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
| type jsonAPIErrResponse struct { | type jsonAPIErrResponse struct { | ||||||
| 	err     *exerr.ExErr | 	err     *exerr.ExErr | ||||||
| 	headers []headerval | 	headers []headerval | ||||||
|  | 	cookies []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j jsonAPIErrResponse) Write(g *gin.Context) { | func (j jsonAPIErrResponse) Write(g *gin.Context) { | ||||||
|  | 	for _, v := range j.headers { | ||||||
|  | 		g.Header(v.Key, v.Val) | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	j.err.Output(g) | 	j.err.Output(g) | ||||||
|  |  | ||||||
|  | 	j.err.CallListener(exerr.MethodOutput) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse { | func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse { | ||||||
| @@ -171,6 +420,39 @@ func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) IsSuccess() bool { | ||||||
|  | 	return false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) Statuscode() int { | ||||||
|  | 	return langext.Coalesce(j.err.RecursiveStatuscode(), 0) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	if str, err := j.err.ToDefaultAPIJson(); err == nil { | ||||||
|  | 		return &str | ||||||
|  | 	} else { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) ContentType() string { | ||||||
|  | 	return "application/json" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) Unwrap() error { | ||||||
|  | 	return j.err | ||||||
|  | } | ||||||
|  |  | ||||||
| func Status(sc int) HTTPResponse { | func Status(sc int) HTTPResponse { | ||||||
| 	return &emptyHTTPResponse{statusCode: sc} | 	return &emptyHTTPResponse{statusCode: sc} | ||||||
| } | } | ||||||
|   | |||||||
| @@ -3,11 +3,9 @@ package ginext | |||||||
| import ( | import ( | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rext" |  | ||||||
| 	"net/http" | 	"net/http" | ||||||
| 	"path" | 	"path" | ||||||
| 	"reflect" | 	"reflect" | ||||||
| 	"regexp" |  | ||||||
| 	"runtime" | 	"runtime" | ||||||
| 	"strings" | 	"strings" | ||||||
| ) | ) | ||||||
| @@ -196,12 +194,6 @@ func nameOfFunction(f any) string { | |||||||
| 		fname = fname[:len(fname)-len("-fm")] | 		fname = fname[:len(fname)-len("-fm")] | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	suffix := rext.W(regexp.MustCompile(`\.func[0-9]+(?:\.[0-9]+)*$`)) |  | ||||||
|  |  | ||||||
| 	if match, ok := suffix.MatchFirst(fname); ok { |  | ||||||
| 		fname = fname[:len(fname)-match.FullMatch().Length()] |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return fname | 	return fname | ||||||
| } | } | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										11
									
								
								ginext/session.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										11
									
								
								ginext/session.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,11 @@ | |||||||
|  | package ginext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type SessionObject interface { | ||||||
|  | 	Init(g *gin.Context, ctx context.Context) error | ||||||
|  | 	Finish(ctx context.Context, resp HTTPResponse) error | ||||||
|  | } | ||||||
							
								
								
									
										40
									
								
								go.mod
									
									
									
									
									
								
							
							
						
						
									
										40
									
								
								go.mod
									
									
									
									
									
								
							| @@ -1,49 +1,57 @@ | |||||||
| module gogs.mikescher.com/BlackForestBytes/goext | module gogs.mikescher.com/BlackForestBytes/goext | ||||||
|  |  | ||||||
| go 1.19 | go 1.21 | ||||||
|  |  | ||||||
| require ( | require ( | ||||||
| 	github.com/gin-gonic/gin v1.9.1 | 	github.com/gin-gonic/gin v1.9.1 | ||||||
|  | 	github.com/glebarez/go-sqlite v1.22.0 // only needed for tests -.- | ||||||
| 	github.com/jmoiron/sqlx v1.3.5 | 	github.com/jmoiron/sqlx v1.3.5 | ||||||
| 	github.com/rs/xid v1.5.0 | 	github.com/rs/xid v1.5.0 | ||||||
| 	github.com/rs/zerolog v1.31.0 | 	github.com/rs/zerolog v1.31.0 | ||||||
| 	go.mongodb.org/mongo-driver v1.12.1 | 	go.mongodb.org/mongo-driver v1.13.1 | ||||||
| 	golang.org/x/crypto v0.14.0 | 	golang.org/x/crypto v0.18.0 | ||||||
| 	golang.org/x/sys v0.13.0 | 	golang.org/x/sys v0.16.0 | ||||||
| 	golang.org/x/term v0.13.0 | 	golang.org/x/term v0.16.0 | ||||||
| ) | ) | ||||||
|  |  | ||||||
| require ( | require ( | ||||||
| 	github.com/bytedance/sonic v1.10.2 // indirect | 	github.com/bytedance/sonic v1.10.2 // indirect | ||||||
| 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect | 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect | ||||||
| 	github.com/chenzhuoyu/iasm v0.9.0 // indirect | 	github.com/chenzhuoyu/iasm v0.9.1 // indirect | ||||||
|  | 	github.com/dustin/go-humanize v1.0.1 // indirect | ||||||
| 	github.com/gabriel-vasile/mimetype v1.4.3 // indirect | 	github.com/gabriel-vasile/mimetype v1.4.3 // indirect | ||||||
| 	github.com/gin-contrib/sse v0.1.0 // indirect | 	github.com/gin-contrib/sse v0.1.0 // indirect | ||||||
| 	github.com/go-playground/locales v0.14.1 // indirect | 	github.com/go-playground/locales v0.14.1 // indirect | ||||||
| 	github.com/go-playground/universal-translator v0.18.1 // indirect | 	github.com/go-playground/universal-translator v0.18.1 // indirect | ||||||
| 	github.com/go-playground/validator/v10 v10.15.5 // indirect | 	github.com/go-playground/validator/v10 v10.16.0 // indirect | ||||||
| 	github.com/goccy/go-json v0.10.2 // indirect | 	github.com/goccy/go-json v0.10.2 // indirect | ||||||
| 	github.com/golang/snappy v0.0.4 // indirect | 	github.com/golang/snappy v0.0.4 // indirect | ||||||
|  | 	github.com/google/uuid v1.5.0 // indirect | ||||||
| 	github.com/json-iterator/go v1.1.12 // indirect | 	github.com/json-iterator/go v1.1.12 // indirect | ||||||
| 	github.com/klauspost/compress v1.17.2 // indirect | 	github.com/klauspost/compress v1.17.4 // indirect | ||||||
| 	github.com/klauspost/cpuid/v2 v2.2.5 // indirect | 	github.com/klauspost/cpuid/v2 v2.2.6 // indirect | ||||||
| 	github.com/leodido/go-urn v1.2.4 // indirect | 	github.com/leodido/go-urn v1.2.4 // indirect | ||||||
| 	github.com/mattn/go-colorable v0.1.13 // indirect | 	github.com/mattn/go-colorable v0.1.13 // indirect | ||||||
| 	github.com/mattn/go-isatty v0.0.20 // indirect | 	github.com/mattn/go-isatty v0.0.20 // indirect | ||||||
| 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect | 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect | ||||||
| 	github.com/modern-go/reflect2 v1.0.2 // indirect | 	github.com/modern-go/reflect2 v1.0.2 // indirect | ||||||
| 	github.com/montanaflynn/stats v0.7.1 // indirect | 	github.com/montanaflynn/stats v0.7.1 // indirect | ||||||
| 	github.com/pelletier/go-toml/v2 v2.1.0 // indirect | 	github.com/pelletier/go-toml/v2 v2.1.1 // indirect | ||||||
|  | 	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect | ||||||
| 	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect | 	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect | ||||||
| 	github.com/ugorji/go/codec v1.2.11 // indirect | 	github.com/ugorji/go/codec v1.2.12 // indirect | ||||||
| 	github.com/xdg-go/pbkdf2 v1.0.0 // indirect | 	github.com/xdg-go/pbkdf2 v1.0.0 // indirect | ||||||
| 	github.com/xdg-go/scram v1.1.2 // indirect | 	github.com/xdg-go/scram v1.1.2 // indirect | ||||||
| 	github.com/xdg-go/stringprep v1.0.4 // indirect | 	github.com/xdg-go/stringprep v1.0.4 // indirect | ||||||
| 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect | 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect | ||||||
| 	golang.org/x/arch v0.5.0 // indirect | 	golang.org/x/arch v0.7.0 // indirect | ||||||
| 	golang.org/x/net v0.17.0 // indirect | 	golang.org/x/net v0.20.0 // indirect | ||||||
| 	golang.org/x/sync v0.4.0 // indirect | 	golang.org/x/sync v0.6.0 // indirect | ||||||
| 	golang.org/x/text v0.13.0 // indirect | 	golang.org/x/text v0.14.0 // indirect | ||||||
| 	google.golang.org/protobuf v1.31.0 // indirect | 	google.golang.org/protobuf v1.32.0 // indirect | ||||||
| 	gopkg.in/yaml.v3 v3.0.1 // indirect | 	gopkg.in/yaml.v3 v3.0.1 // indirect | ||||||
|  | 	modernc.org/libc v1.37.6 // indirect | ||||||
|  | 	modernc.org/mathutil v1.6.0 // indirect | ||||||
|  | 	modernc.org/memory v1.7.2 // indirect | ||||||
|  | 	modernc.org/sqlite v1.28.0 // indirect | ||||||
| ) | ) | ||||||
|   | |||||||
							
								
								
									
										98
									
								
								go.sum
									
									
									
									
									
								
							
							
						
						
									
										98
									
								
								go.sum
									
									
									
									
									
								
							| @@ -6,27 +6,31 @@ github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F | |||||||
| github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= | github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= | ||||||
| github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= | github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= | ||||||
| github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA= | github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA= | ||||||
| github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo= |  | ||||||
| github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | ||||||
|  | github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0= | ||||||
|  | github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | ||||||
| github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= | github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= | ||||||
| github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | ||||||
| github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= | github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= | ||||||
| github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | ||||||
| github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= | github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= | ||||||
| github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= | github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= | ||||||
| github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= | github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= | ||||||
| github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= | github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= | ||||||
| github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= | github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= | ||||||
| github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= | github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= | ||||||
| github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= | github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= | ||||||
| github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= | github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= | ||||||
|  | github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ= | ||||||
|  | github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc= | ||||||
| github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= | github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= | ||||||
|  | github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= | ||||||
| github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= | github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= | ||||||
| github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= | github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= | ||||||
| github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= | github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= | ||||||
| github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= | github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= | ||||||
| github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24= | github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE= | ||||||
| github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||||
| github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | ||||||
| github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | ||||||
| github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | ||||||
| @@ -40,20 +44,20 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ | |||||||
| github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= | github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= | ||||||
| github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= | github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= | ||||||
| github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= | github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= | ||||||
|  | github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ= | ||||||
|  | github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo= | ||||||
|  | github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= | ||||||
|  | github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= | ||||||
| github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= | github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= | ||||||
| github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= | github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= | ||||||
| github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= | github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= | ||||||
| github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= | github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= | ||||||
| github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= | github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= | ||||||
| github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM= | github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= | ||||||
| github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= | github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | ||||||
| github.com/klauspost/compress v1.17.1 h1:NE3C767s2ak2bweCZo3+rdP4U/HoyVXLv/X9f2gPS5g= |  | ||||||
| github.com/klauspost/compress v1.17.1/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= |  | ||||||
| github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4= |  | ||||||
| github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= |  | ||||||
| github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= | github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||||
| github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= | github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= | ||||||
| github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= | github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= | ||||||
| github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= | github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= | ||||||
| @@ -62,12 +66,12 @@ github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= | |||||||
| github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= | github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= | ||||||
| github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= | github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= | ||||||
| github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= | github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= | ||||||
| github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= |  | ||||||
| github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= | github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= | ||||||
| github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= | github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= | ||||||
| github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= | github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= | ||||||
| github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg= |  | ||||||
| github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= | github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= | ||||||
|  | github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI= | ||||||
|  | github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= | ||||||
| github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | ||||||
| github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= | github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= | ||||||
| github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | ||||||
| @@ -76,11 +80,13 @@ github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjY | |||||||
| github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= | github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= | ||||||
| github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= | github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= | ||||||
| github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= | github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= | ||||||
| github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= | github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI= | ||||||
| github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= | github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= | ||||||
| github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= | github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= | ||||||
| github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= | ||||||
| github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= | ||||||
|  | github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= | ||||||
|  | github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= | ||||||
| github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= | github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= | ||||||
| github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= | github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= | ||||||
| github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A= | github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A= | ||||||
| @@ -98,8 +104,8 @@ github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcU | |||||||
| github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= | github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= | ||||||
| github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= | github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= | ||||||
| github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= | github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= | ||||||
| github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= | github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= | ||||||
| github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= | github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= | ||||||
| github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= | github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= | ||||||
| github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= | github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= | ||||||
| github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= | github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= | ||||||
| @@ -110,31 +116,33 @@ github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7Jul | |||||||
| github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk= | github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk= | ||||||
| github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4= | github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4= | ||||||
| github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | ||||||
| go.mongodb.org/mongo-driver v1.12.1 h1:nLkghSU8fQNaK7oUmDhQFsnrtcoNy7Z6LVFKsEecqgE= | go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk= | ||||||
| go.mongodb.org/mongo-driver v1.12.1/go.mod h1:/rGBTebI3XYboVmgz+Wv3Bcbl3aD0QF9zl6kDDw18rQ= | go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo= | ||||||
| golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | ||||||
| golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y= | golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc= | ||||||
| golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||||
| golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | ||||||
| golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | ||||||
| golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= | golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= | ||||||
| golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= | golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= | ||||||
| golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= | golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= | ||||||
| golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= | golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= | ||||||
|  | golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= | ||||||
|  | golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= | ||||||
| golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | ||||||
| golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | ||||||
| golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | ||||||
| golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= | golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= | ||||||
| golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= | golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= | ||||||
| golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= | golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= | ||||||
| golang.org/x/net v0.16.0 h1:7eBu7KsSvFDtSXUIDbh3aqlK4DPsZ1rByC8PFfBThos= | golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= | ||||||
| golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= | golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= | ||||||
| golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= | golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= | ||||||
| golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= | golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= | ||||||
| golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||||
| golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||||
| golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= | golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= | ||||||
| golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= | golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | ||||||
| golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | ||||||
| golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||||
| golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||||
| @@ -146,33 +154,41 @@ golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBc | |||||||
| golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||||
| golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||||
| golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||||
| golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= | golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= | ||||||
| golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
| golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | ||||||
| golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | ||||||
| golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= | golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE= | ||||||
| golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= | golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= | ||||||
| golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | ||||||
| golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||||
| golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||||
| golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= | golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= | ||||||
| golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= | golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= | ||||||
| golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= | golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= | ||||||
| golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= | golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= | ||||||
| golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= | golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= | ||||||
| golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= | golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= | ||||||
| golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | ||||||
| golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= | golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= | ||||||
| golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | ||||||
| golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= |  | ||||||
| golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | ||||||
| google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= | golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU= | ||||||
| google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= | golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= | ||||||
| google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= | google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= | ||||||
|  | google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||||
| gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= | ||||||
| gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | ||||||
| gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | ||||||
| gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= | gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= | ||||||
| gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | ||||||
|  | modernc.org/libc v1.37.6 h1:orZH3c5wmhIQFTXF+Nt+eeauyd+ZIt2BX6ARe+kD+aw= | ||||||
|  | modernc.org/libc v1.37.6/go.mod h1:YAXkAZ8ktnkCKaN9sw/UDeUVkGYJ/YquGO4FTi5nmHE= | ||||||
|  | modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4= | ||||||
|  | modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo= | ||||||
|  | modernc.org/memory v1.7.2 h1:Klh90S215mmH8c9gO98QxQFsY+W451E8AnzjoE2ee1E= | ||||||
|  | modernc.org/memory v1.7.2/go.mod h1:NO4NVCQy0N7ln+T9ngWqOQfi7ley4vpwvARR+Hjw95E= | ||||||
|  | modernc.org/sqlite v1.28.0 h1:Zx+LyDDmXczNnEQdvPuEfcFVA2ZPyaD7UCZDjef3BHQ= | ||||||
|  | modernc.org/sqlite v1.28.0/go.mod h1:Qxpazz0zH8Z1xCFyi5GSL3FzbtZ3fvbjmywNogldEW0= | ||||||
| nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= | nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= | ||||||
| rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= | rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
package goext

// GoextVersion is the current version of the goext library.
const GoextVersion = "0.0.375"

// GoextVersionTimestamp records when this version was tagged
// (ISO-8601 / RFC 3339 style timestamp with numeric UTC offset).
const GoextVersionTimestamp = "2024-01-14T01:50:48+0100"
|   | |||||||
| @@ -37,6 +37,14 @@ func (r GoJsonRender) Render(w http.ResponseWriter) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (r GoJsonRender) RenderString() (string, error) { | ||||||
|  | 	jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent, r.Filter) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
|  | 	} | ||||||
|  | 	return string(jsonBytes), nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func (r GoJsonRender) WriteContentType(w http.ResponseWriter) { | func (r GoJsonRender) WriteContentType(w http.ResponseWriter) { | ||||||
| 	header := w.Header() | 	header := w.Header() | ||||||
| 	if val := header["Content-Type"]; len(val) == 0 { | 	if val := header["Content-Type"]; len(val) == 0 { | ||||||
|   | |||||||
							
								
								
									
										54
									
								
								googleapi/README.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										54
									
								
								googleapi/README.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,54 @@ | |||||||
|  |  | ||||||
|  | Google OAuth Setup (to send mails) | ||||||
|  | ================================== | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  - Login @ https://console.cloud.google.com | ||||||
|  |  | ||||||
|  |  - GMail API aktivieren: https://console.cloud.google.com/apis/library/gmail.googleapis.com? | ||||||
|  |  | ||||||
|  |  - Create new Project (aka 'BackendMailAPI') @ https://console.cloud.google.com/projectcreate | ||||||
|  |    User Type: Intern | ||||||
|  |    Anwendungsname: 'BackendMailAPI' | ||||||
|  |    Support-Email: ... | ||||||
|  |    Autorisierte Domains: 'heydyno.de' (or project domain) | ||||||
|  |    Kontakt-Email: ... | ||||||
|  |     | ||||||
|  |  | ||||||
|  |  - Unter "Anmeldedaten" neuer OAuth Client erstellen @ https://console.cloud.google.com/apis/credentials | ||||||
|  |    Anwendungstyp: Web | ||||||
|  |    Name: 'BackendMailOAuth' | ||||||
|  |    Redirect-Uri: 'http://localhost/oauth' | ||||||
|  |    Client-ID und Client-Key merken | ||||||
|  |  | ||||||
|  |  - Open in Browser: | ||||||
|  |    https://accounts.google.com/o/oauth2/v2/auth?redirect_uri=http://localhost/oauth&prompt=consent&response_type=code&client_id={...}&scope=https://www.googleapis.com/auth/gmail.send&access_type=offline | ||||||
|  |    Code aus redirected URI merken | ||||||
|  |  | ||||||
|  |  - Code via request einlösen (und refresh_token merken): | ||||||
|  |  | ||||||
|  | ``` | ||||||
|  | curl --request POST \ | ||||||
|  |   --url https://oauth2.googleapis.com/token \ | ||||||
|  |   --data code={...} \ | ||||||
|  |   --data redirect_uri=http://localhost/oauth \ | ||||||
|  |   --data client_id={...} \ | ||||||
|  |   --data client_secret={...} \ | ||||||
|  |   --data grant_type=authorization_code \ | ||||||
|  |   --data scope=https://www.googleapis.com/auth/gmail.send | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  |  - Fertig, mit `client_id`, `client_secret` und `refresh_token` kann das package benutzt werden | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
							
								
								
									
										46
									
								
								googleapi/attachment.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										46
									
								
								googleapi/attachment.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,46 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"encoding/base64" | ||||||
|  | 	"fmt" | ||||||
|  | ) | ||||||
|  |  | ||||||
// MailAttachment is a single attachment (inline or regular) of an outgoing mail.
type MailAttachment struct {
	IsInline    bool   // true => part is referenced from the HTML body (Content-Disposition: inline)
	ContentType string // MIME type of Data; empty => no Content-Type header is emitted
	Filename    string // optional filename hint for the receiving client
	Data        []byte // raw (un-encoded) attachment payload
}

// dump renders the attachment as the lines of a MIME part: its headers,
// followed by the payload base64-encoded and wrapped at 80 characters.
// The returned lines carry no line terminators; the caller joins them
// (encodeMimeMail joins with CRLF).
//
// NOTE(review): RFC 2045 requires an empty line between a part's headers and
// its body; none is emitted here (nor by the caller) — verify against a real
// mail client before changing, since consumers may depend on the current form.
func (a MailAttachment) dump() []string {
	res := make([]string, 0, 4)

	if a.ContentType != "" {
		res = append(res, "Content-Type: "+a.ContentType+"; charset=UTF-8")
	}

	res = append(res, "Content-Transfer-Encoding: base64")

	// inline parts are displayed in the body, everything else is a download
	disposition := "attachment"
	if a.IsInline {
		disposition = "inline"
	}
	if a.Filename != "" {
		res = append(res, fmt.Sprintf("Content-Disposition: %s;filename=\"%s\"", disposition, a.Filename))
	} else {
		res = append(res, "Content-Disposition: "+disposition)
	}

	// base64 payload, split into 80-char lines (RFC 2045 allows at most 998).
	b64 := base64.StdEncoding.EncodeToString(a.Data)
	for i := 0; i < len(b64); i += 80 {
		res = append(res, b64[i:min(i+80, len(b64))])
	}

	// (a dead `res = append(res)` no-op statement was removed here)

	return res
}
							
								
								
									
										6
									
								
								googleapi/body.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								googleapi/body.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
// MailBody holds the textual content of an outgoing mail.
// Either field may be left empty; encodeMimeMail chooses the MIME structure
// (text/plain, text/html, or multipart/alternative) from which ones are set.
type MailBody struct {
	Plain string // plain-text variant of the message body
	HTML  string // HTML variant of the message body
}
							
								
								
									
										224
									
								
								googleapi/mimeMessage.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										224
									
								
								googleapi/mimeMessage.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,224 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"mime" | ||||||
|  | 	"strings" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | // https://datatracker.ietf.org/doc/html/rfc2822 | ||||||
|  | func encodeMimeMail(from string, recipients []string, cc []string, bcc []string, subject string, body MailBody, attachments []MailAttachment) string { | ||||||
|  |  | ||||||
|  | 	data := make([]string, 0, 32) | ||||||
|  |  | ||||||
|  | 	data = append(data, "Date: "+time.Now().Format(time.RFC1123Z)) | ||||||
|  | 	data = append(data, "MIME-Version: 1.0") | ||||||
|  | 	data = append(data, "From: "+mime.QEncoding.Encode("UTF-8", from)) | ||||||
|  | 	data = append(data, "To: "+strings.Join(langext.ArrMap(recipients, func(v string) string { return mime.QEncoding.Encode("UTF-8", v) }), ", ")) | ||||||
|  | 	if len(cc) > 0 { | ||||||
|  | 		data = append(data, "To: "+strings.Join(langext.ArrMap(cc, func(v string) string { return mime.QEncoding.Encode("UTF-8", v) }), ", ")) | ||||||
|  | 	} | ||||||
|  | 	if len(bcc) > 0 { | ||||||
|  | 		data = append(data, "Bcc: "+strings.Join(langext.ArrMap(bcc, func(v string) string { return mime.QEncoding.Encode("UTF-8", v) }), ", ")) | ||||||
|  | 	} | ||||||
|  | 	data = append(data, "Subject: "+mime.QEncoding.Encode("UTF-8", subject)) | ||||||
|  |  | ||||||
|  | 	hasInlineAttachments := langext.ArrAny(attachments, func(v MailAttachment) bool { return v.IsInline }) | ||||||
|  | 	hasNormalAttachments := langext.ArrAny(attachments, func(v MailAttachment) bool { return !v.IsInline }) | ||||||
|  | 	hasPlain := body.Plain != "" | ||||||
|  | 	hasHTML := body.HTML != "" | ||||||
|  |  | ||||||
|  | 	mixedBoundary := langext.MustRawHexUUID() | ||||||
|  | 	relatedBoundary := langext.MustRawHexUUID() | ||||||
|  | 	altBoundary := langext.MustRawHexUUID() | ||||||
|  |  | ||||||
|  | 	inlineAttachments := langext.ArrFilter(attachments, func(v MailAttachment) bool { return v.IsInline }) | ||||||
|  | 	normalAttachments := langext.ArrFilter(attachments, func(v MailAttachment) bool { return !v.IsInline }) | ||||||
|  |  | ||||||
|  | 	if hasInlineAttachments && hasNormalAttachments { | ||||||
|  | 		// "mixed+related" | ||||||
|  |  | ||||||
|  | 		data = append(data, "Content-Type: multipart/mixed; boundary="+mixedBoundary) | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, "--"+mixedBoundary) | ||||||
|  |  | ||||||
|  | 		data = append(data, "Content-Type: multipart/related; boundary="+relatedBoundary) | ||||||
|  | 		data = append(data, "") | ||||||
|  |  | ||||||
|  | 		data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, relatedBoundary, altBoundary)...) | ||||||
|  | 		data = append(data, "") | ||||||
|  |  | ||||||
|  | 		for i, attachment := range inlineAttachments { | ||||||
|  | 			data = append(data, "--"+relatedBoundary) | ||||||
|  | 			data = append(data, attachment.dump()...) | ||||||
|  |  | ||||||
|  | 			if i < len(inlineAttachments)-1 { | ||||||
|  | 				data = append(data, "") | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		data = append(data, "--"+relatedBoundary+"--") | ||||||
|  |  | ||||||
|  | 		for i, attachment := range normalAttachments { | ||||||
|  | 			data = append(data, "--"+mixedBoundary) | ||||||
|  | 			data = append(data, attachment.dump()...) | ||||||
|  |  | ||||||
|  | 			if i < len(normalAttachments)-1 { | ||||||
|  | 				data = append(data, "") | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		data = append(data, "--"+mixedBoundary+"--") | ||||||
|  |  | ||||||
|  | 	} else if hasNormalAttachments { | ||||||
|  | 		// "mixed" | ||||||
|  |  | ||||||
|  | 		data = append(data, "Content-Type: multipart/mixed; boundary="+mixedBoundary) | ||||||
|  | 		data = append(data, "") | ||||||
|  |  | ||||||
|  | 		data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, mixedBoundary, altBoundary)...) | ||||||
|  | 		if hasPlain && hasHTML { | ||||||
|  | 			data = append(data, "") | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		for i, attachment := range normalAttachments { | ||||||
|  | 			data = append(data, "--"+mixedBoundary) | ||||||
|  | 			data = append(data, attachment.dump()...) | ||||||
|  |  | ||||||
|  | 			if i < len(normalAttachments)-1 { | ||||||
|  | 				data = append(data, "") | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		data = append(data, "--"+mixedBoundary+"--") | ||||||
|  |  | ||||||
|  | 	} else if hasInlineAttachments { | ||||||
|  | 		// "related" | ||||||
|  |  | ||||||
|  | 		data = append(data, "Content-Type: multipart/related; boundary="+relatedBoundary) | ||||||
|  | 		data = append(data, "") | ||||||
|  |  | ||||||
|  | 		data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, relatedBoundary, altBoundary)...) | ||||||
|  | 		data = append(data, "") | ||||||
|  |  | ||||||
|  | 		for i, attachment := range inlineAttachments { | ||||||
|  | 			data = append(data, "--"+relatedBoundary) | ||||||
|  | 			data = append(data, attachment.dump()...) | ||||||
|  |  | ||||||
|  | 			if i < len(inlineAttachments)-1 { | ||||||
|  | 				data = append(data, "") | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		data = append(data, "--"+relatedBoundary+"--") | ||||||
|  |  | ||||||
|  | 	} else if hasPlain && hasHTML { | ||||||
|  | 		// "alternative" | ||||||
|  |  | ||||||
|  | 		data = append(data, "Content-Type: multipart/alternative; boundary="+altBoundary) | ||||||
|  | 		data = append(data, "") | ||||||
|  |  | ||||||
|  | 		data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, altBoundary, altBoundary)...) | ||||||
|  | 		data = append(data, "") | ||||||
|  |  | ||||||
|  | 		data = append(data, "--"+altBoundary+"--") | ||||||
|  |  | ||||||
|  | 	} else if hasPlain { | ||||||
|  | 		// "plain" | ||||||
|  |  | ||||||
|  | 		data = append(data, "Content-Type: text/plain; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.Plain) | ||||||
|  |  | ||||||
|  | 	} else if hasHTML { | ||||||
|  | 		// "plain" | ||||||
|  |  | ||||||
|  | 		data = append(data, "Content-Type: text/html; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.HTML) | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  | 		// "empty??" | ||||||
|  |  | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return strings.Join(data, "\r\n") | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func dumpMailBody(body MailBody, hasInlineAttachments bool, hasNormalAttachments bool, boundary string, boundaryAlt string) []string { | ||||||
|  |  | ||||||
|  | 	if body.HTML != "" && body.Plain != "" && !hasInlineAttachments && hasNormalAttachments { | ||||||
|  | 		data := make([]string, 0, 16) | ||||||
|  | 		data = append(data, "--"+boundary) | ||||||
|  | 		data = append(data, "Content-Type: multipart/alternative; boundary="+boundaryAlt) | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, "--"+boundaryAlt) | ||||||
|  | 		data = append(data, "Content-Type: text/plain; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.Plain) | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, "--"+boundaryAlt) | ||||||
|  | 		data = append(data, "Content-Type: text/html; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.HTML) | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, "--"+boundaryAlt+"--") | ||||||
|  | 		return data | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if body.HTML != "" && body.Plain != "" && hasInlineAttachments { | ||||||
|  | 		data := make([]string, 0, 2) | ||||||
|  | 		data = append(data, "--"+boundary) | ||||||
|  | 		data = append(data, body.HTML) | ||||||
|  | 		return data | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if body.HTML != "" && body.Plain != "" { | ||||||
|  | 		data := make([]string, 0, 8) | ||||||
|  | 		data = append(data, "--"+boundary) | ||||||
|  | 		data = append(data, "Content-Type: text/plain; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.Plain) | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, "--"+boundary) | ||||||
|  | 		data = append(data, "Content-Type: text/html; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.HTML) | ||||||
|  | 		return data | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if body.HTML != "" { | ||||||
|  | 		data := make([]string, 0, 2) | ||||||
|  | 		data = append(data, "--"+boundary) | ||||||
|  | 		data = append(data, "Content-Type: text/html; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.HTML) | ||||||
|  | 		return data | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if body.Plain != "" { | ||||||
|  | 		data := make([]string, 0, 2) | ||||||
|  | 		data = append(data, "--"+boundary) | ||||||
|  | 		data = append(data, "Content-Type: text/plain; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.Plain) | ||||||
|  | 		return data | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	data := make([]string, 0, 16) | ||||||
|  | 	data = append(data, "--"+boundary) | ||||||
|  | 	data = append(data, "Content-Type: text/plain; charset=UTF-8") | ||||||
|  | 	data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 	data = append(data, "") | ||||||
|  | 	data = append(data, "") // no content ?!? | ||||||
|  | 	return data | ||||||
|  | } | ||||||
							
								
								
									
										80
									
								
								googleapi/mimeMessage_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										80
									
								
								googleapi/mimeMessage_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,80 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"os" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func TestEncodeMimeMail(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	mail := encodeMimeMail( | ||||||
|  | 		"noreply@heydyno.de", | ||||||
|  | 		[]string{"trash@mikescher.de"}, | ||||||
|  | 		nil, | ||||||
|  | 		nil, | ||||||
|  | 		"Hello Test Mail", | ||||||
|  | 		MailBody{Plain: "Plain Text"}, | ||||||
|  | 		nil) | ||||||
|  |  | ||||||
|  | 	verifyMime(mail) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestEncodeMimeMail2(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	mail := encodeMimeMail( | ||||||
|  | 		"noreply@heydyno.de", | ||||||
|  | 		[]string{"trash@mikescher.de"}, | ||||||
|  | 		nil, | ||||||
|  | 		nil, | ||||||
|  | 		"Hello Test Mail (alternative)", | ||||||
|  | 		MailBody{ | ||||||
|  | 			Plain: "Plain Text", | ||||||
|  | 			HTML:  "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>", | ||||||
|  | 		}, | ||||||
|  | 		nil) | ||||||
|  |  | ||||||
|  | 	verifyMime(mail) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestEncodeMimeMail3(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	mail := encodeMimeMail( | ||||||
|  | 		"noreply@heydyno.de", | ||||||
|  | 		[]string{"trash@mikescher.de"}, | ||||||
|  | 		nil, | ||||||
|  | 		nil, | ||||||
|  | 		"Hello Test Mail (alternative)", | ||||||
|  | 		MailBody{ | ||||||
|  | 			HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>", | ||||||
|  | 		}, | ||||||
|  | 		[]MailAttachment{ | ||||||
|  | 			{Data: []byte("HelloWorld"), Filename: "test.txt", IsInline: false, ContentType: "text/plain"}, | ||||||
|  | 		}) | ||||||
|  |  | ||||||
|  | 	verifyMime(mail) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestEncodeMimeMail4(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	b := tst.Must(os.ReadFile("test_placeholder.png"))(t) | ||||||
|  |  | ||||||
|  | 	mail := encodeMimeMail( | ||||||
|  | 		"noreply@heydyno.de", | ||||||
|  | 		[]string{"trash@mikescher.de"}, | ||||||
|  | 		nil, | ||||||
|  | 		nil, | ||||||
|  | 		"Hello Test Mail (inline)", | ||||||
|  | 		MailBody{ | ||||||
|  | 			HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>", | ||||||
|  | 		}, | ||||||
|  | 		[]MailAttachment{ | ||||||
|  | 			{Data: b, Filename: "img.png", IsInline: true, ContentType: "image/png"}, | ||||||
|  | 		}) | ||||||
|  |  | ||||||
|  | 	verifyMime(mail) | ||||||
|  | } | ||||||
|  |  | ||||||
// verifyMime performs a minimal sanity check on an encoded MIME message.
// The previous implementation was a pure no-op; this version at least
// guards against an entirely empty encoding result. A structural check
// (e.g. parsing via net/mail) would be a useful follow-up.
func verifyMime(mail string) {
	if len(mail) == 0 {
		panic("verifyMime: encoded mail is empty")
	}
	//fmt.Printf("%s\n\n", mail)
}
							
								
								
									
										91
									
								
								googleapi/oAuth.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										91
									
								
								googleapi/oAuth.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,91 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"sync"
	"time"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"gogs.mikescher.com/BlackForestBytes/goext/timeext"
)
|  |  | ||||||
// GoogleOAuth supplies OAuth2 access tokens for Google API requests.
type GoogleOAuth interface {
	// AccessToken returns a currently valid access token (refreshing it first if necessary).
	AccessToken() (string, error)
}
|  |  | ||||||
// oauth is the default GoogleOAuth implementation; it obtains access tokens
// via the OAuth2 refresh-token grant and caches them until shortly before expiry.
type oauth struct {
	clientID     string
	clientSecret string
	refreshToken string

	// lock guards the cached token state below.
	lock        sync.RWMutex
	accessToken *string    // last obtained access token (nil until the first refresh)
	expiryDate  *time.Time // expiry of accessToken (nil until the first refresh)
}
|  |  | ||||||
|  | func NewGoogleOAuth(clientid string, clientsecret, refreshtoken string) GoogleOAuth { | ||||||
|  | 	return &oauth{ | ||||||
|  | 		clientID:     clientid, | ||||||
|  | 		clientSecret: clientsecret, | ||||||
|  | 		refreshToken: refreshtoken, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *oauth) AccessToken() (string, error) { | ||||||
|  | 	c.lock.RLock() | ||||||
|  | 	if c.accessToken != nil && c.expiryDate != nil && (*c.expiryDate).After(time.Now()) { | ||||||
|  | 		c.lock.RUnlock() | ||||||
|  | 		return *c.accessToken, nil // still valid | ||||||
|  | 	} | ||||||
|  | 	c.lock.RUnlock() | ||||||
|  |  | ||||||
|  | 	httpclient := http.Client{} | ||||||
|  |  | ||||||
|  | 	url := fmt.Sprintf("https://oauth2.googleapis.com/token?client_id=%s&client_secret=%s&grant_type=%s&refresh_token=%s", | ||||||
|  | 		c.clientID, | ||||||
|  | 		c.clientSecret, | ||||||
|  | 		"refresh_token", | ||||||
|  | 		c.refreshToken) | ||||||
|  |  | ||||||
|  | 	req, err := http.NewRequest(http.MethodPost, url, nil) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	reqStartTime := time.Now() | ||||||
|  |  | ||||||
|  | 	res, err := httpclient.Do(req) | ||||||
|  |  | ||||||
|  | 	type response struct { | ||||||
|  | 		AccessToken string `json:"access_token"` | ||||||
|  | 		ExpiresIn   int    `json:"expires_in"` | ||||||
|  | 		Scope       string `json:"scope"` | ||||||
|  | 		TokenType   string `json:"token_type"` | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	var r response | ||||||
|  |  | ||||||
|  | 	data, err := io.ReadAll(res.Body) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = json.Unmarshal(data, &r) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if r.ExpiresIn == 0 || r.AccessToken == "" { | ||||||
|  | 		return "", exerr.New(exerr.TypeGoogleResponse, "google oauth returned no response").Str("body", string(data)).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	c.lock.Lock() | ||||||
|  | 	c.expiryDate = langext.Ptr(reqStartTime.Add(timeext.FromSeconds(r.ExpiresIn - 10))) | ||||||
|  | 	c.accessToken = langext.Ptr(r.AccessToken) | ||||||
|  | 	c.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	return r.AccessToken, nil | ||||||
|  | } | ||||||
							
								
								
									
										69
									
								
								googleapi/sendMail.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										69
									
								
								googleapi/sendMail.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,69 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	"context" | ||||||
|  | 	"encoding/base64" | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"io" | ||||||
|  | 	"net/http" | ||||||
|  | ) | ||||||
|  |  | ||||||
// MailRef identifies a message accepted by the Gmail API
// (response body of users.messages.send).
type MailRef struct {
	ID       string   `json:"id"`
	ThreadID string   `json:"threadId"`
	LabelIDs []string `json:"labelIds"`
}
|  |  | ||||||
|  | func (c *client) SendMail(ctx context.Context, from string, recipients []string, cc []string, bcc []string, subject string, body MailBody, attachments []MailAttachment) (MailRef, error) { | ||||||
|  |  | ||||||
|  | 	mm := encodeMimeMail(from, recipients, cc, bcc, subject, body, attachments) | ||||||
|  |  | ||||||
|  | 	tok, err := c.oauth.AccessToken() | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MailRef{}, exerr.Wrap(err, "").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	url := fmt.Sprintf("https://gmail.googleapis.com/gmail/v1/users/%s/messages/send?alt=json&prettyPrint=false", "me") | ||||||
|  |  | ||||||
|  | 	msgbody, err := json.Marshal(langext.H{"raw": base64.URLEncoding.EncodeToString([]byte(mm))}) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MailRef{}, exerr.Wrap(err, "").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(msgbody)) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MailRef{}, exerr.Wrap(err, "").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	req.Header.Add("Authorization", "Bearer "+tok) | ||||||
|  | 	req.Header.Add("X-Goog-Api-Client", "blackforestbytes-goext/"+goext.GoextVersion) | ||||||
|  | 	req.Header.Add("User-Agent", "blackforestbytes-goext/"+goext.GoextVersion) | ||||||
|  | 	req.Header.Add("Content-Type", "application/json") | ||||||
|  |  | ||||||
|  | 	resp, err := c.http.Do(req) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MailRef{}, exerr.Wrap(err, "").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	respBody, err := io.ReadAll(resp.Body) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MailRef{}, exerr.Wrap(err, "").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if resp.StatusCode != 200 { | ||||||
|  | 		return MailRef{}, exerr.New(exerr.TypeGoogleStatuscode, "gmail returned non-200 statuscode").Int("sc", resp.StatusCode).Str("body", string(respBody)).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	var respObj MailRef | ||||||
|  | 	err = json.Unmarshal(respBody, &respObj) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MailRef{}, exerr.Wrap(err, "").Str("body", string(respBody)).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return respObj, nil | ||||||
|  | } | ||||||
							
								
								
									
										151
									
								
								googleapi/sendMail_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										151
									
								
								googleapi/sendMail_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,151 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"os" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
// TestMain initializes the exerr error package (exactly once) before the
// package's tests run; SendMail paths build exerr errors and need this.
func TestMain(m *testing.M) {
	if !exerr.Initialized() {
		exerr.Init(exerr.ErrorPackageConfigInit{ZeroLogErrTraces: langext.PFalse, ZeroLogAllTraces: langext.PFalse})
	}
	os.Exit(m.Run())
}
|  |  | ||||||
|  | func TestSendMail1(t *testing.T) { | ||||||
|  | 	t.Skip() | ||||||
|  | 	return | ||||||
|  |  | ||||||
|  | 	auth := NewGoogleOAuth( | ||||||
|  | 		"554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com", | ||||||
|  | 		"TODO", | ||||||
|  | 		"TODO") | ||||||
|  |  | ||||||
|  | 	ctx := context.Background() | ||||||
|  |  | ||||||
|  | 	gclient := NewGoogleClient(auth) | ||||||
|  |  | ||||||
|  | 	mail, err := gclient.SendMail( | ||||||
|  | 		ctx, | ||||||
|  | 		"noreply@heydyno.de", | ||||||
|  | 		[]string{"trash@mikescher.de"}, | ||||||
|  | 		nil, | ||||||
|  | 		nil, | ||||||
|  | 		"Hello Test Mail", | ||||||
|  | 		MailBody{Plain: "Plain Text"}, | ||||||
|  | 		nil) | ||||||
|  |  | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Printf("mail.ID        := %s\n", mail.ID) | ||||||
|  | 	fmt.Printf("mail.ThreadID  := %s\n", mail.ThreadID) | ||||||
|  | 	fmt.Printf("mail.LabelIDs  := %v\n", mail.LabelIDs) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestSendMail2(t *testing.T) { | ||||||
|  | 	t.Skip() | ||||||
|  | 	return | ||||||
|  |  | ||||||
|  | 	auth := NewGoogleOAuth( | ||||||
|  | 		"554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com", | ||||||
|  | 		"TODO", | ||||||
|  | 		"TODO") | ||||||
|  |  | ||||||
|  | 	ctx := context.Background() | ||||||
|  |  | ||||||
|  | 	gclient := NewGoogleClient(auth) | ||||||
|  |  | ||||||
|  | 	mail, err := gclient.SendMail( | ||||||
|  | 		ctx, | ||||||
|  | 		"noreply@heydyno.de", | ||||||
|  | 		[]string{"trash@mikescher.de"}, | ||||||
|  | 		nil, | ||||||
|  | 		nil, | ||||||
|  | 		"Hello Test Mail (alternative)", | ||||||
|  | 		MailBody{ | ||||||
|  | 			Plain: "Plain Text", | ||||||
|  | 			HTML:  "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>", | ||||||
|  | 		}, | ||||||
|  | 		nil) | ||||||
|  |  | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Printf("mail.ID        := %s\n", mail.ID) | ||||||
|  | 	fmt.Printf("mail.ThreadID  := %s\n", mail.ThreadID) | ||||||
|  | 	fmt.Printf("mail.LabelIDs  := %v\n", mail.LabelIDs) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestSendMail3(t *testing.T) { | ||||||
|  | 	t.Skip() | ||||||
|  | 	return | ||||||
|  |  | ||||||
|  | 	auth := NewGoogleOAuth( | ||||||
|  | 		"554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com", | ||||||
|  | 		"TODO", | ||||||
|  | 		"TODO") | ||||||
|  |  | ||||||
|  | 	ctx := context.Background() | ||||||
|  |  | ||||||
|  | 	gclient := NewGoogleClient(auth) | ||||||
|  |  | ||||||
|  | 	mail, err := gclient.SendMail( | ||||||
|  | 		ctx, | ||||||
|  | 		"noreply@heydyno.de", | ||||||
|  | 		[]string{"trash@mikescher.de"}, | ||||||
|  | 		nil, | ||||||
|  | 		nil, | ||||||
|  | 		"Hello Test Mail (attach)", | ||||||
|  | 		MailBody{ | ||||||
|  | 			HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>", | ||||||
|  | 		}, | ||||||
|  | 		[]MailAttachment{ | ||||||
|  | 			{Data: []byte("HelloWorld"), Filename: "test.txt", IsInline: false, ContentType: "text/plain"}, | ||||||
|  | 		}) | ||||||
|  |  | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Printf("mail.ID        := %s\n", mail.ID) | ||||||
|  | 	fmt.Printf("mail.ThreadID  := %s\n", mail.ThreadID) | ||||||
|  | 	fmt.Printf("mail.LabelIDs  := %v\n", mail.LabelIDs) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestSendMail4(t *testing.T) { | ||||||
|  | 	t.Skip() | ||||||
|  | 	return | ||||||
|  |  | ||||||
|  | 	auth := NewGoogleOAuth( | ||||||
|  | 		"554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com", | ||||||
|  | 		"TODO", | ||||||
|  | 		"TODO") | ||||||
|  |  | ||||||
|  | 	ctx := context.Background() | ||||||
|  |  | ||||||
|  | 	gclient := NewGoogleClient(auth) | ||||||
|  |  | ||||||
|  | 	b := tst.Must(os.ReadFile("test_placeholder.png"))(t) | ||||||
|  |  | ||||||
|  | 	mail, err := gclient.SendMail( | ||||||
|  | 		ctx, | ||||||
|  | 		"noreply@heydyno.de", | ||||||
|  | 		[]string{"trash@mikescher.de"}, | ||||||
|  | 		nil, | ||||||
|  | 		nil, | ||||||
|  | 		"Hello Test Mail (inline)", | ||||||
|  | 		MailBody{ | ||||||
|  | 			HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>", | ||||||
|  | 		}, | ||||||
|  | 		[]MailAttachment{ | ||||||
|  | 			{Data: b, Filename: "img.png", IsInline: true, ContentType: "image/png"}, | ||||||
|  | 		}) | ||||||
|  |  | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Printf("mail.ID        := %s\n", mail.ID) | ||||||
|  | 	fmt.Printf("mail.ThreadID  := %s\n", mail.ThreadID) | ||||||
|  | 	fmt.Printf("mail.LabelIDs  := %v\n", mail.LabelIDs) | ||||||
|  | } | ||||||
							
								
								
									
										22
									
								
								googleapi/service.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										22
									
								
								googleapi/service.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,22 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"net/http" | ||||||
|  | ) | ||||||
|  |  | ||||||
// GoogleClient is a minimal client for the Google APIs used by this package
// (currently only Gmail message sending).
type GoogleClient interface {
	// SendMail encodes and sends a mail via the Gmail API.
	SendMail(ctx context.Context, from string, recipients []string, cc []string, bcc []string, subject string, body MailBody, attachments []MailAttachment) (MailRef, error)
}
|  |  | ||||||
// client implements GoogleClient.
type client struct {
	oauth GoogleOAuth // token source for per-request bearer tokens
	http  http.Client // shared HTTP client, reused across requests
}
|  |  | ||||||
|  | func NewGoogleClient(oauth GoogleOAuth) GoogleClient { | ||||||
|  | 	return &client{ | ||||||
|  | 		oauth: oauth, | ||||||
|  | 		http:  http.Client{}, | ||||||
|  | 	} | ||||||
|  | } | ||||||
							
								
								
									
										
											BIN
										
									
								
								googleapi/test_placeholder.png
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								googleapi/test_placeholder.png
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 11 KiB | 
							
								
								
									
										178
									
								
								langext/baseAny.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										178
									
								
								langext/baseAny.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,178 @@ | |||||||
|  | package langext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"crypto/rand" | ||||||
|  | 	"errors" | ||||||
|  | 	"math" | ||||||
|  | 	"math/big" | ||||||
|  | ) | ||||||
|  |  | ||||||
// AnyBaseConverter encodes/decodes values in a positional numeral system
// defined by an arbitrary charset: the base is the number of runes in the
// charset and charset[0] is the zero digit.
type AnyBaseConverter struct {
	base    uint64
	charset []rune
}

// NewAnyBaseConverter creates a converter for the given charset.
func NewAnyBaseConverter(cs string) AnyBaseConverter {
	rcs := []rune(cs)
	return AnyBaseConverter{
		base:    uint64(len(rcs)),
		charset: rcs,
	}
}

// Rand returns a cryptographically random string of rlen runes drawn
// uniformly from the charset.
func (bc AnyBaseConverter) Rand(rlen int) string {
	biBase := big.NewInt(int64(bc.base))

	// sample from [0, N) where N is a multiple of the base, so that the
	// final "mod base" is unbiased (the previous bound of MaxInt64 slightly
	// favored low digits)
	randMax := big.NewInt(math.MaxInt64 - math.MaxInt64%int64(bc.base))

	out := make([]rune, rlen)

	for i := 0; i < rlen; i++ {
		v, err := rand.Int(rand.Reader, randMax)
		if err != nil {
			panic(err) // crypto/rand failure is unrecoverable here
		}

		out[i] = bc.charset[v.Mod(v, biBase).Int64()]
	}

	return string(out)
}

// EncodeUInt64 renders num in the converter's base, most significant digit
// first (matching DecodeUInt64 and EncodeBigInt).
func (bc AnyBaseConverter) EncodeUInt64(num uint64) string {
	if num == 0 {
		// the zero digit of the charset (not the literal "0", which may not
		// be part of the charset at all)
		return string(bc.charset[0])
	}

	b := ""

	// peel off digits least-significant first and prepend them
	// (the old code divided by base62Base — wrong for any non-62 charset —
	// and appended, producing the digits in reversed order)
	for num > 0 {
		r := num % bc.base
		num /= bc.base

		b = string(bc.charset[r]) + b
	}

	return b
}
|  |  | ||||||
|  | func (bc AnyBaseConverter) DecodeUInt64(str string) (uint64, error) { | ||||||
|  | 	if str == "" { | ||||||
|  | 		return 0, errors.New("empty string") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	result := uint64(0) | ||||||
|  |  | ||||||
|  | 	for _, v := range str { | ||||||
|  | 		result *= base62Base | ||||||
|  |  | ||||||
|  | 		pos := ArrFirstIndex(bc.charset, v) | ||||||
|  | 		if pos == -1 { | ||||||
|  | 			return 0, errors.New("invalid character: " + string(v)) | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		result += uint64(pos) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return result, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (bc AnyBaseConverter) Encode(src []byte) string { | ||||||
|  | 	value := new(big.Int) | ||||||
|  | 	value.SetBytes(src) | ||||||
|  | 	return bc.EncodeBigInt(value) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (bc AnyBaseConverter) EncodeBigInt(src *big.Int) string { | ||||||
|  | 	value := new(big.Int) | ||||||
|  | 	value.Set(src) | ||||||
|  |  | ||||||
|  | 	isneg := value.Sign() < 0 | ||||||
|  |  | ||||||
|  | 	answer := "" | ||||||
|  |  | ||||||
|  | 	if isneg { | ||||||
|  | 		value.Neg(value) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	biBase := big.NewInt(int64(bc.base)) | ||||||
|  |  | ||||||
|  | 	rem := new(big.Int) | ||||||
|  |  | ||||||
|  | 	for value.Sign() > 0 { | ||||||
|  | 		value.QuoRem(value, biBase, rem) | ||||||
|  | 		answer = string(bc.charset[rem.Int64()]) + answer | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if isneg { | ||||||
|  | 		return "-" + answer | ||||||
|  | 	} else { | ||||||
|  | 		return answer | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (bc AnyBaseConverter) Decode(src string) ([]byte, error) { | ||||||
|  | 	value, err := bc.DecodeToBigInt(src) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, err | ||||||
|  | 	} | ||||||
|  | 	return value.Bytes(), nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (bc AnyBaseConverter) DecodeToBigInt(_src string) (*big.Int, error) { | ||||||
|  | 	result := new(big.Int) | ||||||
|  | 	result.SetInt64(0) | ||||||
|  |  | ||||||
|  | 	src := []rune(_src) | ||||||
|  |  | ||||||
|  | 	if len(src) == 0 { | ||||||
|  | 		return nil, errors.New("string is empty") | ||||||
|  | 	} | ||||||
|  | 	if bc.base < 2 { | ||||||
|  | 		return nil, errors.New("not enough digits") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	i := 0 | ||||||
|  |  | ||||||
|  | 	sign := new(big.Int) | ||||||
|  | 	sign.SetInt64(1) | ||||||
|  | 	if src[i] == '+' { | ||||||
|  | 		i++ | ||||||
|  | 	} else if src[i] == '-' { | ||||||
|  | 		i++ | ||||||
|  | 		sign.SetInt64(-1) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if i >= len(src) { | ||||||
|  | 		return nil, errors.New("no digits in input") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	biBase := big.NewInt(int64(bc.base)) | ||||||
|  |  | ||||||
|  | 	oldResult := new(big.Int) | ||||||
|  |  | ||||||
|  | 	for ; i < len(src); i++ { | ||||||
|  | 		n := ArrFirstIndex(bc.charset, src[i]) | ||||||
|  | 		if n < 0 { | ||||||
|  | 			return nil, errors.New("invalid characters in input") | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		oldResult.Set(result) | ||||||
|  |  | ||||||
|  | 		result.Mul(result, biBase) | ||||||
|  | 		result.Add(result, big.NewInt(int64(n))) | ||||||
|  |  | ||||||
|  | 		if result.Cmp(oldResult) < 0 { | ||||||
|  | 			return nil, errors.New("overflow") | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if sign.Cmp(big.NewInt(0)) < 0 { | ||||||
|  | 		result.Neg(result) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return result, nil | ||||||
|  | } | ||||||
							
								
								
									
										80
									
								
								langext/baseAny_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										80
									
								
								langext/baseAny_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,80 @@ | |||||||
|  | package langext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func _anyEncStr(bc AnyBaseConverter, v string) string { | ||||||
|  | 	vr := bc.Encode([]byte(v)) | ||||||
|  | 	return vr | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func _anyDecStr(bc AnyBaseConverter, v string) string { | ||||||
|  | 	vr, err := bc.Decode(v) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
|  | 	} | ||||||
|  | 	return string(vr) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestAnyBase58DefaultEncoding(t *testing.T) { | ||||||
|  | 	tst.AssertEqual(t, _anyEncStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "Hello"), "9Ajdvzr") | ||||||
|  | 	tst.AssertEqual(t, _anyEncStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in."), "48638SMcJuah5okqPx4kCVf5d8QAdgbdNf28g7ReY13prUENNbMyssjq5GjsrJHF5zeZfqs4uJMUJHr7VbrU4XBUZ2Fw9DVtqtn9N1eXucEWSEZahXV6w4ysGSWqGdpeYTJf1MdDzTg8vfcQViifJjZX") | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestAnyBase58DefaultDecoding(t *testing.T) { | ||||||
|  | 	tst.AssertEqual(t, _anyDecStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "9Ajdvzr"), "Hello") | ||||||
|  | 	tst.AssertEqual(t, _anyDecStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "48638SMcJuah5okqPx4kCVf5d8QAdgbdNf28g7ReY13prUENNbMyssjq5GjsrJHF5zeZfqs4uJMUJHr7VbrU4XBUZ2Fw9DVtqtn9N1eXucEWSEZahXV6w4ysGSWqGdpeYTJf1MdDzTg8vfcQViifJjZX"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in.") | ||||||
|  | } | ||||||
|  |  | ||||||
// TestAnyBaseDecode cross-checks pairs of representations of the same value
// in different charsets: each pair must decode to identical bytes, and
// re-encoding each side's bytes with the other side's converter must
// reproduce the other side's string exactly (full round-trip both ways).
func TestAnyBaseDecode(t *testing.T) {

	// reference charsets, from base 2 up to a 256-emoji alphabet
	const (
		Binary  = "01"
		Decimal = "0123456789"
		Hex     = "0123456789ABCDEF"
		DNA     = "ACGT"
		Base32  = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567"
		Base58  = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
		Base62  = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
		Base64  = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
		Base256 = "🚀🪐☄🛰🌌🌑🌒🌓🌔🌕🌖🌗🌘🌍🌏🌎🐉☀💻🖥💾💿😂❤😍🤣😊🙏💕😭😘👍😅👏😁🔥🥰💔💖💙😢🤔😆🙄💪😉☺👌🤗💜😔😎😇🌹🤦🎉💞✌✨🤷😱😌🌸🙌😋💗💚😏💛🙂💓🤩😄😀🖤😃💯🙈👇🎶😒🤭❣😜💋👀😪😑💥🙋😞😩😡🤪👊🥳😥🤤👉💃😳✋😚😝😴🌟😬🙃🍀🌷😻😓⭐✅🥺🌈😈🤘💦✔😣🏃💐☹🎊💘😠☝😕🌺🎂🌻😐🖕💝🙊😹🗣💫💀👑🎵🤞😛🔴😤🌼😫⚽🤙☕🏆🤫👈😮🙆🍻🍃🐶💁😲🌿🧡🎁⚡🌞🎈❌✊👋😰🤨😶🤝🚶💰🍓💢🤟🙁🚨💨🤬✈🎀🍺🤓😙💟🌱😖👶🥴▶➡❓💎💸⬇😨🌚🦋😷🕺⚠🙅😟😵👎🤲🤠🤧📌🔵💅🧐🐾🍒😗🤑🌊🤯🐷☎💧😯💆👆🎤🙇🍑❄🌴💣🐸💌📍🥀🤢👅💡💩👐📸👻🤐🤮🎼🥵🚩🍎🍊👼💍📣🥂"
	)

	// one test case: the same value written in two different charsets
	type TestDef struct {
		FromCS  string
		FromVal string
		ToCS    string
		ToVal   string
	}

	defs := []TestDef{
		{Binary, "10100101011100000101010", Decimal, "5421098"},
		{Decimal, "5421098", DNA, "CCAGGTGAAGGG"},
		{Decimal, "5421098", DNA, "CCAGGTGAAGGG"},
		{Decimal, "80085", Base256, "🪐💞🔵"},
		{Hex, "48656C6C6C20576F526C5421", Base64, "SGVsbGwgV29SbFQh"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base32, "CIMVWGY3B7QFO32SNRPZBB"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base58, "2fUsGKQUcgQcwSqpvy6"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base62, "V34nvybdQ3m3RHk9Sr"},
	}

	for _, def := range defs {

		d1 := NewAnyBaseConverter(def.FromCS)
		d2 := NewAnyBaseConverter(def.ToCS)

		// both sides must decode to the same raw bytes
		v1 := tst.Must(d1.Decode(def.FromVal))(t)
		v2 := tst.Must(d2.Decode(def.ToVal))(t)

		tst.AssertArrayEqual(t, v1, v2)

		// ...and re-encoding must reproduce each original string
		str2 := d2.Encode(v1)
		tst.AssertEqual(t, str2, def.ToVal)

		str1 := d1.Encode(v2)
		tst.AssertEqual(t, str1, def.FromVal)

	}
}
| @@ -29,6 +29,14 @@ func ArrToMap[T comparable, V any](a []V, keyfunc func(V) T) map[T]V { | |||||||
| 	return result | 	return result | ||||||
| } | } | ||||||
|  |  | ||||||
// ArrToKVMap builds a map from a slice, deriving each entry's key and value
// with the supplied projection functions. On key collisions, later elements
// overwrite earlier ones.
func ArrToKVMap[T any, K comparable, V any](a []T, keyfunc func(T) K, valfunc func(T) V) map[K]V {
	out := make(map[K]V, len(a))
	for _, elem := range a {
		k := keyfunc(elem)
		out[k] = valfunc(elem)
	}
	return out
}
|  |  | ||||||
| func ArrToSet[T comparable](a []T) map[T]bool { | func ArrToSet[T comparable](a []T) map[T]bool { | ||||||
| 	result := make(map[T]bool, len(a)) | 	result := make(map[T]bool, len(a)) | ||||||
| 	for _, v := range a { | 	for _, v := range a { | ||||||
|   | |||||||
| @@ -10,10 +10,23 @@ var PTrue = Ptr(true) | |||||||
| // PFalse := &false | // PFalse := &false | ||||||
| var PFalse = Ptr(false) | var PFalse = Ptr(false) | ||||||
|  |  | ||||||
// PNil is a pointer to a nil any-value (i.e. &nil).
var PNil = Ptr[any](nil)

// Ptr returns a pointer to (a copy of) v.
func Ptr[T any](v T) *T {
	return &v
}

// DblPtr returns a pointer-to-pointer to (a copy of) v.
func DblPtr[T any](v T) **T {
	inner := &v
	return &inner
}

// DblPtrNil returns a non-nil pointer to a nil *T.
func DblPtrNil[T any]() **T {
	var inner *T
	return &inner
}
|  |  | ||||||
| func PtrInt32(v int32) *int32 { | func PtrInt32(v int32) *int32 { | ||||||
| 	return &v | 	return &v | ||||||
| } | } | ||||||
| @@ -35,7 +48,7 @@ func IsNil(i interface{}) bool { | |||||||
| 		return true | 		return true | ||||||
| 	} | 	} | ||||||
| 	switch reflect.TypeOf(i).Kind() { | 	switch reflect.TypeOf(i).Kind() { | ||||||
| 	case reflect.Ptr, reflect.Map, reflect.Array, reflect.Chan, reflect.Slice: | 	case reflect.Ptr, reflect.Map, reflect.Chan, reflect.Slice, reflect.Func, reflect.UnsafePointer: | ||||||
| 		return reflect.ValueOf(i).IsNil() | 		return reflect.ValueOf(i).IsNil() | ||||||
| 	} | 	} | ||||||
| 	return false | 	return false | ||||||
|   | |||||||
| @@ -5,6 +5,7 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/primitive" | 	"go.mongodb.org/mongo-driver/bson/primitive" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | ||||||
| 	"reflect" | 	"reflect" | ||||||
| ) | ) | ||||||
| @@ -18,9 +19,21 @@ func CreateGoExtBsonRegistry() *bsoncodec.Registry { | |||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{}) | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{}) | ||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{}) | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{}) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.UnixTime{}), rfctime.UnixTime{}) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.UnixTime{}), rfctime.UnixTime{}) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.UnixMilliTime{}), rfctime.UnixMilliTime{}) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.UnixMilliTime{}), rfctime.UnixMilliTime{}) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.UnixNanoTime{}), rfctime.UnixNanoTime{}) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.UnixNanoTime{}), rfctime.UnixNanoTime{}) | ||||||
|  |  | ||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.Date{}), rfctime.Date{}) | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.Date{}), rfctime.Date{}) | ||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.Date{}), rfctime.Date{}) | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.Date{}), rfctime.Date{}) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.SecondsF64(0)), rfctime.SecondsF64(0)) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(rfctime.SecondsF64(0))), rfctime.SecondsF64(0)) | ||||||
|  |  | ||||||
| 	bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb) | 	bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb) | ||||||
| 	bsoncodec.DefaultValueDecoders{}.RegisterDefaultDecoders(rb) | 	bsoncodec.DefaultValueDecoders{}.RegisterDefaultDecoders(rb) | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										28
									
								
								pagination/filter.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										28
									
								
								pagination/filter.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,28 @@ | |||||||
|  | package pagination | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type MongoFilter interface { | ||||||
|  | 	FilterQuery() mongo.Pipeline | ||||||
|  | 	Sort() bson.D | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type dynamicFilter struct { | ||||||
|  | 	pipeline mongo.Pipeline | ||||||
|  | 	sort     bson.D | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (d dynamicFilter) FilterQuery() mongo.Pipeline { | ||||||
|  | 	return d.pipeline | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (d dynamicFilter) Sort() bson.D { | ||||||
|  | 	return d.sort | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func CreateFilter(pipeline mongo.Pipeline, sort bson.D) MongoFilter { | ||||||
|  | 	return dynamicFilter{pipeline: pipeline, sort: sort} | ||||||
|  | } | ||||||
							
								
								
									
										19
									
								
								pagination/pagination.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										19
									
								
								pagination/pagination.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,19 @@ | |||||||
|  | package pagination | ||||||
|  |  | ||||||
// Pagination describes one page of a paged listing, together with the
// totals needed to render pager controls.
type Pagination struct {
	Page             int `json:"page"`             // page (first page == 1)
	Limit            int `json:"limit"`            // max-page-size
	TotalPages       int `json:"totalPages"`       // total page-count
	TotalItems       int `json:"totalItems"`       // total items-count
	CurrentPageCount int `json:"currentPageCount"` // item-count in current page ( == len(data) )
}
|  |  | ||||||
// CalcPaginationTotalPages returns how many pages are needed to show
// totalItems entries at limit entries per page (ceiling division).
// Zero items or a zero limit yields zero pages.
func CalcPaginationTotalPages(totalItems int, limit int) int {
	if totalItems == 0 || limit == 0 {
		return 0
	}
	return 1 + (totalItems-1)/limit
}
							
								
								
									
										98
									
								
								reflectext/mapAccess.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										98
									
								
								reflectext/mapAccess.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,98 @@ | |||||||
|  | package reflectext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"reflect" | ||||||
|  | 	"strings" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | // GetMapPath returns the value deep inside a hierahically nested map structure | ||||||
|  | // eg: | ||||||
|  | // x := langext.H{"K1": langext.H{"K2": 665}} | ||||||
|  | // GetMapPath[int](x, "K1.K2") == 665 | ||||||
|  | func GetMapPath[TData any](mapval any, path string) (TData, bool) { | ||||||
|  | 	var ok bool | ||||||
|  |  | ||||||
|  | 	split := strings.Split(path, ".") | ||||||
|  |  | ||||||
|  | 	for i, key := range split { | ||||||
|  |  | ||||||
|  | 		if i < len(split)-1 { | ||||||
|  | 			mapval, ok = GetMapField[any](mapval, key) | ||||||
|  | 			if !ok { | ||||||
|  | 				return *new(TData), false | ||||||
|  | 			} | ||||||
|  | 		} else { | ||||||
|  | 			return GetMapField[TData](mapval, key) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return *new(TData), false | ||||||
|  | } | ||||||
|  |  | ||||||
// GetMapField gets the value of a map, without knowing the actual types (mapval is any)
// eg:
// x := langext.H{"K1": 665}
// GetMapField[int](x, "K1") == 665
//
// works with aliased types and autom. dereferences pointers
func GetMapField[TData any, TKey comparable](mapval any, key TKey) (TData, bool) {

	rval := reflect.ValueOf(mapval)

	// dereference (possibly nested) pointers until we reach the actual map
	for rval.Kind() == reflect.Ptr && !rval.IsNil() {
		rval = rval.Elem()
	}

	if rval.Kind() != reflect.Map {
		return *new(TData), false // mapval is not a map
	}

	kval := reflect.ValueOf(key)

	if !kval.Type().AssignableTo(rval.Type().Key()) {
		return *new(TData), false // key cannot index mapval
	}

	eval := rval.MapIndex(kval)
	if !eval.IsValid() {
		return *new(TData), false // key does not exist in mapval
	}

	destType := reflect.TypeOf(new(TData)).Elem()

	// fast path: exact type match
	if eval.Type() == destType {
		return eval.Interface().(TData), true
	}

	// convertible type (aliases, widening) — but string<->non-string
	// conversions are suppressed, see preventConvert
	if eval.CanConvert(destType) && !preventConvert(eval.Type(), destType) {
		return eval.Convert(destType).Interface().(TData), true
	}

	if (eval.Kind() == reflect.Ptr || eval.Kind() == reflect.Interface) && eval.IsNil() && destType.Kind() == reflect.Ptr {
		return *new(TData), false // special case: mapval[key] is nil
	}

	// unwrap pointers/interfaces one level at a time, retrying the
	// exact-match and convert checks at each level
	for (eval.Kind() == reflect.Ptr || eval.Kind() == reflect.Interface) && !eval.IsNil() {
		eval = eval.Elem()

		if eval.Type() == destType {
			return eval.Interface().(TData), true
		}

		if eval.CanConvert(destType) && !preventConvert(eval.Type(), destType) {
			return eval.Convert(destType).Interface().(TData), true
		}
	}

	return *new(TData), false // mapval[key] is not of type TData
}
|  |  | ||||||
|  | func preventConvert(t1 reflect.Type, t2 reflect.Type) bool { | ||||||
|  | 	if t1.Kind() == reflect.String && t1.Kind() != reflect.String { | ||||||
|  | 		return true | ||||||
|  | 	} | ||||||
|  | 	if t2.Kind() == reflect.String && t1.Kind() != reflect.String { | ||||||
|  | 		return true | ||||||
|  | 	} | ||||||
|  | 	return false | ||||||
|  | } | ||||||
							
								
								
									
										55
									
								
								reflectext/mapAccess_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										55
									
								
								reflectext/mapAccess_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,55 @@ | |||||||
|  | package reflectext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
// TestGetMapPath exercises path-based lookup ("A.B") through nested maps:
// a hit, a missing intermediate key, a missing leaf key, a mismatching
// target type, and an aliased target type (PseudoInt = int64, convertible
// from the stored int).
func TestGetMapPath(t *testing.T) {
	type PseudoInt = int64

	mymap2 := map[string]map[string]any{"Test": {"Second": 3}}

	// pass the map as `any` to force the reflection-based lookup path
	var maany2 any = mymap2

	tst.AssertEqual(t, fmt.Sprint(GetMapPath[int](maany2, "Test.Second")), "3 true")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[int](maany2, "Test2.Second")), "0 false")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[int](maany2, "Test.Second2")), "0 false")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[string](maany2, "Test.Second")), "false")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[string](maany2, "Test2.Second")), "false")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[string](maany2, "Test.Second2")), "false")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[PseudoInt](maany2, "Test.Second")), "3 true")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[PseudoInt](maany2, "Test2.Second")), "0 false")
	tst.AssertEqual(t, fmt.Sprint(GetMapPath[PseudoInt](maany2, "Test.Second2")), "0 false")
}
|  |  | ||||||
// TestGetMapField exercises single-key lookup against both a map[string]any
// and a concretely-typed map[string]int (each passed as `any`): hits,
// missing keys, mismatching target types, and an aliased/convertible
// target type (PseudoInt = int64).
func TestGetMapField(t *testing.T) {
	type PseudoInt = int64

	mymap1 := map[string]any{"Test": 12}
	mymap2 := map[string]int{"Test": 12}

	var maany1 any = mymap1
	var maany2 any = mymap2

	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany1, "Test")), "12 true")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany1, "Test2")), "0 false")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany1, "Test")), "false")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany1, "Test2")), "false")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany1, "Test")), "12 true")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany1, "Test2")), "0 false")

	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany2, "Test")), "12 true")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany2, "Test2")), "0 false")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany2, "Test")), "false")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany2, "Test2")), "false")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test")), "12 true")
	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test2")), "0 false")
}
|  |  | ||||||
// NOTE(review): main2 and main look like leftover scaffolding in this
// _test.go file — presumably dead code; confirm nothing references them
// and consider removing.
func main2() {
}

func main() {
}
| @@ -2,7 +2,14 @@ package rfctime | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/timeext" | 	"gogs.mikescher.com/BlackForestBytes/goext/timeext" | ||||||
|  | 	"reflect" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -54,6 +61,63 @@ func (d SecondsF64) MarshalJSON() ([]byte, error) { | |||||||
| 	return json.Marshal(secs) | 	return json.Marshal(secs) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (d *SecondsF64) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*d = SecondsF64(0) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeDouble { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into SecondsF64", bt)) | ||||||
|  | 	} | ||||||
|  | 	var secValue float64 | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&secValue) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*d = SecondsF64(int64(secValue * float64(time.Second))) | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
// MarshalBSONValue implements bson.ValueMarshaler; the duration is stored
// as a BSON double holding d.Seconds() (fractional seconds, matching what
// UnmarshalBSONValue reads back).
func (d SecondsF64) MarshalBSONValue() (bsontype.Type, []byte, error) {
	return bson.MarshalValue(d.Seconds())
}
|  |  | ||||||
// DecodeValue implements bsoncodec.ValueDecoder so SecondsF64 can be
// registered in a custom registry (see mongoext.CreateGoExtBsonRegistry).
// Unlike UnmarshalBSONValue it can also populate *SecondsF64 destinations,
// including resetting them to nil for an empty value.
func (d SecondsF64) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
	// allocate a target if the destination is a settable nil pointer
	if val.Kind() == reflect.Ptr && val.IsNil() {
		if !val.CanSet() {
			return errors.New("ValueUnmarshalerDecodeValue")
		}
		val.Set(reflect.New(val.Type().Elem()))
	}

	// pull the raw BSON bytes (and their type) out of the reader
	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
	if err != nil {
		return err
	}

	// empty value into a pointer destination -> nil pointer
	if val.Kind() == reflect.Ptr && len(src) == 0 {
		val.Set(reflect.Zero(val.Type()))
		return nil
	}

	// reuse the plain unmarshaller; d (the receiver copy) becomes the result
	err = d.UnmarshalBSONValue(tp, src)
	if err != nil {
		return err
	}

	if val.Kind() == reflect.Ptr {
		val.Set(reflect.ValueOf(&d))
	} else {
		val.Set(reflect.ValueOf(d))
	}

	return nil
}
|  |  | ||||||
| func NewSecondsF64(t time.Duration) SecondsF64 { | func NewSecondsF64(t time.Duration) SecondsF64 { | ||||||
| 	return SecondsF64(t) | 	return SecondsF64(t) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -2,6 +2,13 @@ package rfctime | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"reflect" | ||||||
| 	"strconv" | 	"strconv" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -59,6 +66,63 @@ func (t *UnixTime) UnmarshalText(data []byte) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (t *UnixTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*t = UnixTime{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeDateTime { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into UnixTime", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt time.Time | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*t = UnixTime(tt) | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
// MarshalBSONValue implements bson.ValueMarshaler; the value is stored as
// a regular BSON datetime (the unix-seconds form is only used for JSON/text).
func (t UnixTime) MarshalBSONValue() (bsontype.Type, []byte, error) {
	return bson.MarshalValue(time.Time(t))
}
|  |  | ||||||
// DecodeValue implements bsoncodec.ValueDecoder so UnixTime can be
// registered in a custom registry (see mongoext.CreateGoExtBsonRegistry).
// Unlike UnmarshalBSONValue it can also populate *UnixTime destinations,
// including resetting them to nil for an empty value.
func (t UnixTime) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
	// allocate a target if the destination is a settable nil pointer
	if val.Kind() == reflect.Ptr && val.IsNil() {
		if !val.CanSet() {
			return errors.New("ValueUnmarshalerDecodeValue")
		}
		val.Set(reflect.New(val.Type().Elem()))
	}

	// pull the raw BSON bytes (and their type) out of the reader
	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
	if err != nil {
		return err
	}

	// empty value into a pointer destination -> nil pointer
	if val.Kind() == reflect.Ptr && len(src) == 0 {
		val.Set(reflect.Zero(val.Type()))
		return nil
	}

	// reuse the plain unmarshaller; t (the receiver copy) becomes the result
	err = t.UnmarshalBSONValue(tp, src)
	if err != nil {
		return err
	}

	if val.Kind() == reflect.Ptr {
		val.Set(reflect.ValueOf(&t))
	} else {
		val.Set(reflect.ValueOf(t))
	}

	return nil
}
|  |  | ||||||
| func (t UnixTime) Serialize() string { | func (t UnixTime) Serialize() string { | ||||||
| 	return strconv.FormatInt(t.Time().Unix(), 10) | 	return strconv.FormatInt(t.Time().Unix(), 10) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -2,6 +2,13 @@ package rfctime | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"reflect" | ||||||
| 	"strconv" | 	"strconv" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -59,6 +66,63 @@ func (t *UnixMilliTime) UnmarshalText(data []byte) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (t *UnixMilliTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*t = UnixMilliTime{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeDateTime { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into UnixMilliTime", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt time.Time | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*t = UnixMilliTime(tt) | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
// MarshalBSONValue implements bson.ValueMarshaler; the value is stored as
// a regular BSON datetime (the unix-millis form is only used for JSON/text).
func (t UnixMilliTime) MarshalBSONValue() (bsontype.Type, []byte, error) {
	return bson.MarshalValue(time.Time(t))
}
|  |  | ||||||
// DecodeValue implements bsoncodec.ValueDecoder so UnixMilliTime can be
// registered in a custom registry (see mongoext.CreateGoExtBsonRegistry).
// Unlike UnmarshalBSONValue it can also populate *UnixMilliTime destinations,
// including resetting them to nil for an empty value.
func (t UnixMilliTime) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
	// allocate a target if the destination is a settable nil pointer
	if val.Kind() == reflect.Ptr && val.IsNil() {
		if !val.CanSet() {
			return errors.New("ValueUnmarshalerDecodeValue")
		}
		val.Set(reflect.New(val.Type().Elem()))
	}

	// pull the raw BSON bytes (and their type) out of the reader
	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
	if err != nil {
		return err
	}

	// empty value into a pointer destination -> nil pointer
	if val.Kind() == reflect.Ptr && len(src) == 0 {
		val.Set(reflect.Zero(val.Type()))
		return nil
	}

	// reuse the plain unmarshaller; t (the receiver copy) becomes the result
	err = t.UnmarshalBSONValue(tp, src)
	if err != nil {
		return err
	}

	if val.Kind() == reflect.Ptr {
		val.Set(reflect.ValueOf(&t))
	} else {
		val.Set(reflect.ValueOf(t))
	}

	return nil
}
|  |  | ||||||
| func (t UnixMilliTime) Serialize() string { | func (t UnixMilliTime) Serialize() string { | ||||||
| 	return strconv.FormatInt(t.Time().UnixMilli(), 10) | 	return strconv.FormatInt(t.Time().UnixMilli(), 10) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -2,6 +2,13 @@ package rfctime | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"reflect" | ||||||
| 	"strconv" | 	"strconv" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -59,6 +66,63 @@ func (t *UnixNanoTime) UnmarshalText(data []byte) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (t *UnixNanoTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*t = UnixNanoTime{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeDateTime { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into UnixNanoTime", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt time.Time | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*t = UnixNanoTime(tt) | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
// MarshalBSONValue implements bson.ValueMarshaler; the value is stored as
// a regular BSON datetime (the unix-nanos form is only used for JSON/text).
func (t UnixNanoTime) MarshalBSONValue() (bsontype.Type, []byte, error) {
	return bson.MarshalValue(time.Time(t))
}
|  |  | ||||||
// DecodeValue implements bsoncodec.ValueDecoder so UnixNanoTime can be
// registered in a custom registry (see mongoext.CreateGoExtBsonRegistry).
// Unlike UnmarshalBSONValue it can also populate *UnixNanoTime destinations,
// including resetting them to nil for an empty value.
func (t UnixNanoTime) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
	// allocate a target if the destination is a settable nil pointer
	if val.Kind() == reflect.Ptr && val.IsNil() {
		if !val.CanSet() {
			return errors.New("ValueUnmarshalerDecodeValue")
		}
		val.Set(reflect.New(val.Type().Elem()))
	}

	// pull the raw BSON bytes (and their type) out of the reader
	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
	if err != nil {
		return err
	}

	// empty value into a pointer destination -> nil pointer
	if val.Kind() == reflect.Ptr && len(src) == 0 {
		val.Set(reflect.Zero(val.Type()))
		return nil
	}

	// reuse the plain unmarshaller; t (the receiver copy) becomes the result
	err = t.UnmarshalBSONValue(tp, src)
	if err != nil {
		return err
	}

	if val.Kind() == reflect.Ptr {
		val.Set(reflect.ValueOf(&t))
	} else {
		val.Set(reflect.ValueOf(t))
	}

	return nil
}
|  |  | ||||||
| func (t UnixNanoTime) Serialize() string { | func (t UnixNanoTime) Serialize() string { | ||||||
| 	return strconv.FormatInt(t.Time().UnixNano(), 10) | 	return strconv.FormatInt(t.Time().UnixNano(), 10) | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										13
									
								
								scn/scn.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								scn/scn.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,13 @@ | |||||||
|  | package scn | ||||||
|  |  | ||||||
// Connection holds the credentials used to talk to an SCN server.
type Connection struct {
	uid   string
	token string
}

// New creates a Connection for the given user-id / token pair.
func New(userid string, token string) *Connection {
	conn := Connection{uid: userid, token: token}
	return &conn
}
							
								
								
									
										171
									
								
								scn/send.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										171
									
								
								scn/send.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,171 @@ | |||||||
|  | package scn | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	"context" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	json "gogs.mikescher.com/BlackForestBytes/goext/gojson" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"io" | ||||||
|  | 	"net/http" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
// Error categories raised by the SCN client.
// NOTE(review): presumably mapped from the server's HTTP responses in Send —
// confirm against the (here truncated) Send implementation.
var (
	ErrAuthFailed        = exerr.NewType("GOEXT_SCN_AUTHFAILED", nil)
	ErrQuota             = exerr.NewType("GOEXT_SCN_QUOTAEXCEEDED", nil)
	ErrBadRequest        = exerr.NewType("GOEXT_SCN_BADREQUEST", nil)
	ErrInternalServerErr = exerr.NewType("GOEXT_SCN_INTERNALSERVER", nil)
	ErrOther             = exerr.NewType("GOEXT_SCN_OTHER", nil)
)
|  |  | ||||||
// MessageResponse is the JSON body returned by the SCN server for a
// successfully accepted message.
type MessageResponse struct {
	ErrHighlight int    `json:"errhighlight"`
	Error        int    `json:"error"`
	IsPro        bool   `json:"is_pro"`
	Message      string `json:"message"`
	Messagecount int    `json:"messagecount"`
	Quota        int    `json:"quota"`     // messages used in the current quota period
	QuotaMax     int    `json:"quota_max"` // maximum messages allowed per quota period
	SCNMessageID string `json:"scn_msg_id"`
	Success      bool   `json:"success"`
	SuppressSend bool   `json:"suppress_send"`
}

// MessageErrResponse is the JSON body the SCN server returns on failure;
// Send uses its Message field as the error text when it can be parsed.
type MessageErrResponse struct {
	Errhighlight int    `json:"errhighlight"`
	Error        int    `json:"error"`
	Message      string `json:"message"`
	Success      bool   `json:"success"`
}
|  |  | ||||||
// MessageBuilder accumulates the optional fields of an SCN notification.
// Create one via Connection.Message, chain the setters, then call Send.
// nil pointer fields are simply omitted from the request.
type MessageBuilder struct {
	conn       *Connection
	title      string
	content    *string
	channel    *string
	time       *time.Time
	sendername *string
	priority   *int
}
|  |  | ||||||
|  | func (c *Connection) Message(title string) *MessageBuilder { | ||||||
|  | 	return &MessageBuilder{conn: c, title: title} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *MessageBuilder) Channel(channel string) *MessageBuilder { | ||||||
|  | 	c.channel = &channel | ||||||
|  | 	return c | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *MessageBuilder) Content(content string) *MessageBuilder { | ||||||
|  | 	c.content = &content | ||||||
|  | 	return c | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *MessageBuilder) Time(t time.Time) *MessageBuilder { | ||||||
|  | 	c.time = &t | ||||||
|  | 	return c | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *MessageBuilder) SenderName(sn string) *MessageBuilder { | ||||||
|  | 	c.sendername = &sn | ||||||
|  | 	return c | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *MessageBuilder) Priority(p int) *MessageBuilder { | ||||||
|  | 	c.priority = &p | ||||||
|  | 	return c | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *MessageBuilder) Send(ctx context.Context) (MessageResponse, error) { | ||||||
|  | 	client := http.Client{Timeout: 5 * time.Second} | ||||||
|  |  | ||||||
|  | 	body := langext.H{} | ||||||
|  |  | ||||||
|  | 	body["user_id"] = c.conn.uid | ||||||
|  | 	body["key"] = c.conn.token | ||||||
|  |  | ||||||
|  | 	if c.channel != nil { | ||||||
|  | 		body["channel"] = *c.channel | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	body["title"] = c.title | ||||||
|  |  | ||||||
|  | 	if c.content != nil { | ||||||
|  | 		body["content"] = *c.content | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if c.sendername != nil { | ||||||
|  | 		body["content"] = *c.sendername | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if c.time != nil { | ||||||
|  | 		body["timestamp"] = c.time.Unix() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if c.priority != nil { | ||||||
|  | 		body["priority"] = *c.priority | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	body["msg_id"] = langext.MustHexUUID() | ||||||
|  |  | ||||||
|  | 	rawbody, err := json.Marshal(body) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MessageResponse{}, err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	req, err := http.NewRequestWithContext(ctx, "POST", "https://simplecloudnotifier.de/", bytes.NewBuffer(rawbody)) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MessageResponse{}, err | ||||||
|  | 	} | ||||||
|  | 	req.Header.Set("Content-Type", "application/json") | ||||||
|  |  | ||||||
|  | 	resp, err := client.Do(req) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MessageResponse{}, err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	defer func() { _ = resp.Body.Close() }() | ||||||
|  |  | ||||||
|  | 	if resp.StatusCode >= 200 && resp.StatusCode < 300 { | ||||||
|  |  | ||||||
|  | 		raw, err := io.ReadAll(resp.Body) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return MessageResponse{}, err | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		var mr MessageResponse | ||||||
|  | 		err = json.Unmarshal(raw, &mr) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return MessageResponse{}, err | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		return mr, nil | ||||||
|  | 	} else { | ||||||
|  | 		errMessage := resp.Status | ||||||
|  |  | ||||||
|  | 		if raw, err := io.ReadAll(resp.Body); err == nil { | ||||||
|  | 			var mr MessageErrResponse | ||||||
|  | 			if err = json.Unmarshal(raw, &mr); err == nil { | ||||||
|  | 				errMessage = mr.Message | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if resp.StatusCode == 400 { | ||||||
|  | 			return MessageResponse{}, exerr.New(ErrBadRequest, errMessage).Build() | ||||||
|  | 		} | ||||||
|  | 		if resp.StatusCode == 401 { | ||||||
|  | 			return MessageResponse{}, exerr.New(ErrAuthFailed, errMessage).Build() | ||||||
|  | 		} | ||||||
|  | 		if resp.StatusCode == 403 { | ||||||
|  | 			return MessageResponse{}, exerr.New(ErrQuota, errMessage).Build() | ||||||
|  | 		} | ||||||
|  | 		if resp.StatusCode == 500 { | ||||||
|  | 			return MessageResponse{}, exerr.New(ErrInternalServerErr, errMessage).Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		return MessageResponse{}, exerr.New(ErrOther, errMessage).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | } | ||||||
							
								
								
									
										24
									
								
								scn/send_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										24
									
								
								scn/send_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,24 @@ | |||||||
|  | package scn | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"fmt" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func TestSendSCN(t *testing.T) { | ||||||
|  | 	t.Skip() | ||||||
|  | 	return | ||||||
|  |  | ||||||
|  | 	mr, err := New("TODO", "TODO"). | ||||||
|  | 		Message("Hello Test"). | ||||||
|  | 		Content("This is a test / goext"). | ||||||
|  | 		Send(context.Background()) | ||||||
|  |  | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	fmt.Printf("%+v\n", mr) | ||||||
|  |  | ||||||
|  | } | ||||||
							
								
								
									
										120
									
								
								sq/builder.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										120
									
								
								sq/builder.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,120 @@ | |||||||
|  | package sq | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"reflect" | ||||||
|  | 	"strings" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func BuildUpdateStatement(q Queryable, tableName string, obj any, idColumn string) (string, PP, error) { | ||||||
|  | 	rval := reflect.ValueOf(obj) | ||||||
|  | 	rtyp := rval.Type() | ||||||
|  |  | ||||||
|  | 	params := PP{} | ||||||
|  |  | ||||||
|  | 	setClauses := make([]string, 0) | ||||||
|  |  | ||||||
|  | 	matchClause := "" | ||||||
|  |  | ||||||
|  | 	for i := 0; i < rtyp.NumField(); i++ { | ||||||
|  |  | ||||||
|  | 		rsfield := rtyp.Field(i) | ||||||
|  | 		rvfield := rval.Field(i) | ||||||
|  |  | ||||||
|  | 		if !rsfield.IsExported() { | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		columnName := rsfield.Tag.Get("db") | ||||||
|  | 		if columnName == "" || columnName == "-" { | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if idColumn == columnName { | ||||||
|  | 			idValue, err := convertValueToDB(q, rvfield.Interface()) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return "", nil, err | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			matchClause = fmt.Sprintf("(%s = :%s)", columnName, params.Add(idValue)) | ||||||
|  |  | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if rsfield.Type.Kind() == reflect.Ptr && rvfield.IsNil() { | ||||||
|  |  | ||||||
|  | 			setClauses = append(setClauses, fmt.Sprintf("%s = NULL", columnName)) | ||||||
|  |  | ||||||
|  | 		} else { | ||||||
|  |  | ||||||
|  | 			val, err := convertValueToDB(q, rvfield.Interface()) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return "", nil, err | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			setClauses = append(setClauses, fmt.Sprintf("%s = :%s", columnName, params.Add(val))) | ||||||
|  |  | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if len(setClauses) == 0 { | ||||||
|  | 		return "", nil, exerr.New(exerr.TypeSQLBuild, "no updates clauses found in object").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if matchClause == "" { | ||||||
|  | 		return "", nil, exerr.New(exerr.TypeSQLBuild, "id column not found in object").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	//goland:noinspection SqlNoDataSourceInspection | ||||||
|  | 	return fmt.Sprintf("UPDATE %s SET %s WHERE %s", tableName, strings.Join(setClauses, ", "), matchClause), params, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func BuildInsertStatement(q Queryable, tableName string, obj any) (string, PP, error) { | ||||||
|  | 	rval := reflect.ValueOf(obj) | ||||||
|  | 	rtyp := rval.Type() | ||||||
|  |  | ||||||
|  | 	params := PP{} | ||||||
|  |  | ||||||
|  | 	fields := make([]string, 0) | ||||||
|  | 	values := make([]string, 0) | ||||||
|  |  | ||||||
|  | 	for i := 0; i < rtyp.NumField(); i++ { | ||||||
|  |  | ||||||
|  | 		rsfield := rtyp.Field(i) | ||||||
|  | 		rvfield := rval.Field(i) | ||||||
|  |  | ||||||
|  | 		if !rsfield.IsExported() { | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		columnName := rsfield.Tag.Get("db") | ||||||
|  | 		if columnName == "" || columnName == "-" { | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if rsfield.Type.Kind() == reflect.Ptr && rvfield.IsNil() { | ||||||
|  |  | ||||||
|  | 			fields = append(fields, columnName) | ||||||
|  | 			values = append(fields, "NULL") | ||||||
|  |  | ||||||
|  | 		} else { | ||||||
|  |  | ||||||
|  | 			val, err := convertValueToDB(q, rvfield.Interface()) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return "", nil, err | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			fields = append(fields, columnName) | ||||||
|  | 			values = append(fields, ":"+params.Add(val)) | ||||||
|  |  | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if len(fields) == 0 { | ||||||
|  | 		return "", nil, exerr.New(exerr.TypeSQLBuild, "no fields found in object").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	//goland:noinspection SqlNoDataSourceInspection | ||||||
|  | 	return fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", tableName, strings.Join(fields, ", "), strings.Join(values, ", ")), params, nil | ||||||
|  | } | ||||||
							
								
								
									
										87
									
								
								sq/builder_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										87
									
								
								sq/builder_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,87 @@ | |||||||
|  | package sq | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"database/sql" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"github.com/glebarez/go-sqlite" | ||||||
|  | 	"github.com/jmoiron/sqlx" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"path/filepath" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
// dummyQueryable is a stub implementation of Queryable whose Exec and Query
// always fail; it exists so builder code paths can be exercised in tests
// without a real database connection.
type dummyQueryable struct {
}

// Exec always returns an error; the stub performs no SQL.
func (d dummyQueryable) Exec(ctx context.Context, sql string, prep PP) (sql.Result, error) {
	return nil, errors.New("err")
}

// Query always returns an error; the stub performs no SQL.
func (d dummyQueryable) Query(ctx context.Context, sql string, prep PP) (*sqlx.Rows, error) {
	return nil, errors.New("err")
}

// ListConverter returns no converters, so values pass through unconverted.
func (d dummyQueryable) ListConverter() []DBTypeConverter {
	return nil
}
|  |  | ||||||
|  | func TestCreateUpdateStatement(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	type request struct { | ||||||
|  | 		ID        string  `json:"id"        db:"id"` | ||||||
|  | 		Timestamp int     `json:"timestamp" db:"timestamp"` | ||||||
|  | 		StrVal    string  `json:"strVal"    db:"str_val"` | ||||||
|  | 		FloatVal  float64 `json:"floatVal"    db:"float_val"` | ||||||
|  | 		Dummy     bool    `json:"dummyBool" db:"dummy_bool"` | ||||||
|  | 		JsonVal   JsonObj `json:"jsonVal" db:"json_val"` | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if !langext.InArray("sqlite3", sql.Drivers()) { | ||||||
|  | 		sqlite.RegisterAsSQLITE3() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	ctx := context.Background() | ||||||
|  |  | ||||||
|  | 	dbdir := t.TempDir() | ||||||
|  | 	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3") | ||||||
|  |  | ||||||
|  | 	url := fmt.Sprintf("file:%s?_pragma=journal_mode(%s)&_pragma=timeout(%d)&_pragma=foreign_keys(%s)&_pragma=busy_timeout(%d)", dbfile1, "DELETE", 1000, "true", 1000) | ||||||
|  |  | ||||||
|  | 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||||
|  |  | ||||||
|  | 	db := NewDB(xdb) | ||||||
|  | 	db.RegisterDefaultConverter() | ||||||
|  |  | ||||||
|  | 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	sqlStr, pp, err := BuildUpdateStatement(db, "requests", request{ | ||||||
|  | 		ID:        "9927", | ||||||
|  | 		Timestamp: 12321, | ||||||
|  | 		StrVal:    "hello world", | ||||||
|  | 		Dummy:     true, | ||||||
|  | 		FloatVal:  3.14159, | ||||||
|  | 		JsonVal: JsonObj{ | ||||||
|  | 			"firs":   1, | ||||||
|  | 			"second": true, | ||||||
|  | 		}, | ||||||
|  | 	}, "id") | ||||||
|  |  | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Printf("\n\n") | ||||||
|  |  | ||||||
|  | 	fmt.Printf("######## PP:\n") | ||||||
|  | 	for k, v := range pp { | ||||||
|  | 		fmt.Printf("['%s']       =>       %+v\n", k, v) | ||||||
|  | 	} | ||||||
|  | 	fmt.Printf("\n\n") | ||||||
|  |  | ||||||
|  | 	fmt.Printf("######## SQL:\n%+v\n\n", sqlStr) | ||||||
|  |  | ||||||
|  | 	fmt.Printf("\n\n") | ||||||
|  |  | ||||||
|  | } | ||||||
							
								
								
									
										153
									
								
								sq/converter.go
									
									
									
									
									
								
							
							
						
						
									
										153
									
								
								sq/converter.go
									
									
									
									
									
								
							| @@ -1,20 +1,16 @@ | |||||||
| package sq | package sq | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"encoding/json" | ||||||
| 	"errors" | 	"errors" | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | ||||||
|  | 	"reflect" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| //TODO UNFINISHED |  | ||||||
| // this is not finished |  | ||||||
| // idea was that we can register converter in the database struct |  | ||||||
| // they get inherited from the transactions |  | ||||||
| // and when marshallingunmarshaling (sq.Query | sq.QueryAll) |  | ||||||
| // or marshaling (sq.InsertSingle) |  | ||||||
| // the types get converter automatically... |  | ||||||
|  |  | ||||||
| type DBTypeConverter interface { | type DBTypeConverter interface { | ||||||
| 	ModelTypeString() string | 	ModelTypeString() string | ||||||
| 	DBTypeString() string | 	DBTypeString() string | ||||||
| @@ -40,16 +36,108 @@ var ConverterTimeToUnixMillis = NewDBTypeConverter[time.Time, int64](func(v time | |||||||
| 	return time.UnixMilli(v), nil | 	return time.UnixMilli(v), nil | ||||||
| }) | }) | ||||||
|  |  | ||||||
| var ConverterOptTimeToUnixMillis = NewDBTypeConverter[*time.Time, *int64](func(v *time.Time) (*int64, error) { | var ConverterRFCUnixMilliTimeToUnixMillis = NewDBTypeConverter[rfctime.UnixMilliTime, int64](func(v rfctime.UnixMilliTime) (int64, error) { | ||||||
| 	if v == nil { | 	return v.UnixMilli(), nil | ||||||
| 		return nil, nil | }, func(v int64) (rfctime.UnixMilliTime, error) { | ||||||
|  | 	return rfctime.NewUnixMilli(time.UnixMilli(v)), nil | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterRFCUnixNanoTimeToUnixNanos = NewDBTypeConverter[rfctime.UnixNanoTime, int64](func(v rfctime.UnixNanoTime) (int64, error) { | ||||||
|  | 	return v.UnixNano(), nil | ||||||
|  | }, func(v int64) (rfctime.UnixNanoTime, error) { | ||||||
|  | 	return rfctime.NewUnixNano(time.Unix(0, v)), nil | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterRFCUnixTimeToUnixSeconds = NewDBTypeConverter[rfctime.UnixTime, int64](func(v rfctime.UnixTime) (int64, error) { | ||||||
|  | 	return v.Unix(), nil | ||||||
|  | }, func(v int64) (rfctime.UnixTime, error) { | ||||||
|  | 	return rfctime.NewUnix(time.Unix(v, 0)), nil | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | // ConverterRFC339TimeToString | ||||||
|  | // Does not really use RFC339 - but sqlite does not understand timezones and the `T` delimiter | ||||||
|  | var ConverterRFC339TimeToString = NewDBTypeConverter[rfctime.RFC3339Time, string](func(v rfctime.RFC3339Time) (string, error) { | ||||||
|  | 	return v.Time().In(time.UTC).Format("2006-01-02 15:04:05"), nil | ||||||
|  | }, func(v string) (rfctime.RFC3339Time, error) { | ||||||
|  | 	t, err := time.Parse("2006-01-02 15:04:05", v) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return rfctime.RFC3339Time{}, err | ||||||
| 	} | 	} | ||||||
| 	return langext.Ptr(v.UnixMilli()), nil | 	return rfctime.NewRFC3339(t), nil | ||||||
| }, func(v *int64) (*time.Time, error) { | }) | ||||||
| 	if v == nil { |  | ||||||
| 		return nil, nil | // ConverterRFC339NanoTimeToString | ||||||
|  | // Does not really use RFC339 - but sqlite does not understand timezones and the `T` delimiter | ||||||
|  | var ConverterRFC339NanoTimeToString = NewDBTypeConverter[rfctime.RFC3339NanoTime, string](func(v rfctime.RFC3339NanoTime) (string, error) { | ||||||
|  | 	return v.Time().In(time.UTC).Format("2006-01-02 15:04:05.999999999"), nil | ||||||
|  | }, func(v string) (rfctime.RFC3339NanoTime, error) { | ||||||
|  | 	t, err := time.ParseInLocation("2006-01-02 15:04:05.999999999", v, time.UTC) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return rfctime.RFC3339NanoTime{}, err | ||||||
| 	} | 	} | ||||||
| 	return langext.Ptr(time.UnixMilli(*v)), nil | 	return rfctime.NewRFC3339Nano(t), nil | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterJsonObjToString = NewDBTypeConverter[JsonObj, string](func(v JsonObj) (string, error) { | ||||||
|  | 	mrsh, err := json.Marshal(v) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", err | ||||||
|  | 	} | ||||||
|  | 	return string(mrsh), nil | ||||||
|  | }, func(v string) (JsonObj, error) { | ||||||
|  | 	var mrsh JsonObj | ||||||
|  | 	if err := json.Unmarshal([]byte(v), &mrsh); err != nil { | ||||||
|  | 		return JsonObj{}, err | ||||||
|  | 	} | ||||||
|  | 	return mrsh, nil | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterJsonArrToString = NewDBTypeConverter[JsonArr, string](func(v JsonArr) (string, error) { | ||||||
|  | 	mrsh, err := json.Marshal(v) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", err | ||||||
|  | 	} | ||||||
|  | 	return string(mrsh), nil | ||||||
|  | }, func(v string) (JsonArr, error) { | ||||||
|  | 	var mrsh JsonArr | ||||||
|  | 	if err := json.Unmarshal([]byte(v), &mrsh); err != nil { | ||||||
|  | 		return JsonArr{}, err | ||||||
|  | 	} | ||||||
|  | 	return mrsh, nil | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterExErrCategoryToString = NewDBTypeConverter[exerr.ErrorCategory, string](func(v exerr.ErrorCategory) (string, error) { | ||||||
|  | 	return v.Category, nil | ||||||
|  | }, func(v string) (exerr.ErrorCategory, error) { | ||||||
|  | 	for _, cat := range exerr.AllCategories { | ||||||
|  | 		if cat.Category == v { | ||||||
|  | 			return cat, nil | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return exerr.CatUser, errors.New("failed to convert '" + v + "' to exerr.ErrorCategory") | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterExErrSeverityToString = NewDBTypeConverter[exerr.ErrorSeverity, string](func(v exerr.ErrorSeverity) (string, error) { | ||||||
|  | 	return v.Severity, nil | ||||||
|  | }, func(v string) (exerr.ErrorSeverity, error) { | ||||||
|  | 	for _, sev := range exerr.AllSeverities { | ||||||
|  | 		if sev.Severity == v { | ||||||
|  | 			return sev, nil | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return exerr.SevErr, errors.New("failed to convert '" + v + "' to exerr.ErrorSeverity") | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterExErrTypeToString = NewDBTypeConverter[exerr.ErrorType, string](func(v exerr.ErrorType) (string, error) { | ||||||
|  | 	return v.Key, nil | ||||||
|  | }, func(v string) (exerr.ErrorType, error) { | ||||||
|  | 	for _, etp := range exerr.ListRegisteredTypes() { | ||||||
|  | 		if etp.Key == v { | ||||||
|  | 			return etp, nil | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return exerr.NewType(v, nil), nil | ||||||
| }) | }) | ||||||
|  |  | ||||||
| type dbTypeConverterImpl[TModelData any, TDBData any] struct { | type dbTypeConverterImpl[TModelData any, TDBData any] struct { | ||||||
| @@ -89,3 +177,36 @@ func NewDBTypeConverter[TModelData any, TDBData any](todb func(v TModelData) (TD | |||||||
| 		tomodel:         tomodel, | 		tomodel:         tomodel, | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func convertValueToDB(q Queryable, value any) (any, error) { | ||||||
|  | 	modelTypeStr := fmt.Sprintf("%T", value) | ||||||
|  |  | ||||||
|  | 	for _, conv := range q.ListConverter() { | ||||||
|  | 		if conv.ModelTypeString() == modelTypeStr { | ||||||
|  | 			return conv.ModelToDB(value) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if value != nil && reflect.TypeOf(value).Kind() == reflect.Ptr { | ||||||
|  | 		vof := reflect.ValueOf(value) | ||||||
|  | 		if vof.IsNil() { | ||||||
|  | 			return nil, nil | ||||||
|  | 		} else { | ||||||
|  | 			return convertValueToDB(q, vof.Elem().Interface()) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return value, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func convertValueToModel(q Queryable, value any, destinationType string) (any, error) { | ||||||
|  | 	dbTypeString := fmt.Sprintf("%T", value) | ||||||
|  |  | ||||||
|  | 	for _, conv := range q.ListConverter() { | ||||||
|  | 		if conv.ModelTypeString() == destinationType && conv.DBTypeString() == dbTypeString { | ||||||
|  | 			return conv.DBToModel(value) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return value, nil | ||||||
|  | } | ||||||
|   | |||||||
| @@ -4,16 +4,20 @@ import ( | |||||||
| 	"context" | 	"context" | ||||||
| 	"database/sql" | 	"database/sql" | ||||||
| 	"github.com/jmoiron/sqlx" | 	"github.com/jmoiron/sqlx" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"sync" | 	"sync" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type DB interface { | type DB interface { | ||||||
| 	Exec(ctx context.Context, sql string, prep PP) (sql.Result, error) | 	Queryable | ||||||
| 	Query(ctx context.Context, sql string, prep PP) (*sqlx.Rows, error) |  | ||||||
| 	Ping(ctx context.Context) error | 	Ping(ctx context.Context) error | ||||||
| 	BeginTransaction(ctx context.Context, iso sql.IsolationLevel) (Tx, error) | 	BeginTransaction(ctx context.Context, iso sql.IsolationLevel) (Tx, error) | ||||||
| 	AddListener(listener Listener) | 	AddListener(listener Listener) | ||||||
| 	Exit() error | 	Exit() error | ||||||
|  | 	RegisterConverter(DBTypeConverter) | ||||||
|  | 	RegisterDefaultConverter() | ||||||
| } | } | ||||||
|  |  | ||||||
| type database struct { | type database struct { | ||||||
| @@ -21,6 +25,7 @@ type database struct { | |||||||
| 	txctr uint16 | 	txctr uint16 | ||||||
| 	lock  sync.Mutex | 	lock  sync.Mutex | ||||||
| 	lstr  []Listener | 	lstr  []Listener | ||||||
|  | 	conv  []DBTypeConverter | ||||||
| } | } | ||||||
|  |  | ||||||
| func NewDB(db *sqlx.DB) DB { | func NewDB(db *sqlx.DB) DB { | ||||||
| @@ -41,7 +46,7 @@ func (db *database) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Resul | |||||||
| 	for _, v := range db.lstr { | 	for _, v := range db.lstr { | ||||||
| 		err := v.PreExec(ctx, nil, &sqlstr, &prep) | 		err := v.PreExec(ctx, nil, &sqlstr, &prep) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, err | 			return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -52,7 +57,7 @@ func (db *database) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Resul | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, exerr.Wrap(err, "Failed to [exec] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 	} | 	} | ||||||
| 	return res, nil | 	return res, nil | ||||||
| } | } | ||||||
| @@ -62,7 +67,7 @@ func (db *database) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Ro | |||||||
| 	for _, v := range db.lstr { | 	for _, v := range db.lstr { | ||||||
| 		err := v.PreQuery(ctx, nil, &sqlstr, &prep) | 		err := v.PreQuery(ctx, nil, &sqlstr, &prep) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, err | 			return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -73,7 +78,7 @@ func (db *database) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Ro | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, exerr.Wrap(err, "Failed to [query] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 	} | 	} | ||||||
| 	return rows, nil | 	return rows, nil | ||||||
| } | } | ||||||
| @@ -93,7 +98,7 @@ func (db *database) Ping(ctx context.Context) error { | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return exerr.Wrap(err, "Failed to [ping] sql database").Build() | ||||||
| 	} | 	} | ||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
| @@ -113,16 +118,40 @@ func (db *database) BeginTransaction(ctx context.Context, iso sql.IsolationLevel | |||||||
|  |  | ||||||
| 	xtx, err := db.db.BeginTxx(ctx, &sql.TxOptions{Isolation: iso}) | 	xtx, err := db.db.BeginTxx(ctx, &sql.TxOptions{Isolation: iso}) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, exerr.Wrap(err, "Failed to start sql transaction").Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	for _, v := range db.lstr { | 	for _, v := range db.lstr { | ||||||
| 		v.PostTxBegin(txid, err) | 		v.PostTxBegin(txid, err) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return NewTransaction(xtx, txid, db.lstr), nil | 	return NewTransaction(xtx, txid, db), nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func (db *database) Exit() error { | func (db *database) Exit() error { | ||||||
| 	return db.db.Close() | 	return db.db.Close() | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (db *database) ListConverter() []DBTypeConverter { | ||||||
|  | 	return db.conv | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (db *database) RegisterConverter(conv DBTypeConverter) { | ||||||
|  | 	db.conv = langext.ArrFilter(db.conv, func(v DBTypeConverter) bool { return v.ModelTypeString() != conv.ModelTypeString() }) | ||||||
|  | 	db.conv = append(db.conv, conv) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (db *database) RegisterDefaultConverter() { | ||||||
|  | 	db.RegisterConverter(ConverterBoolToBit) | ||||||
|  | 	db.RegisterConverter(ConverterTimeToUnixMillis) | ||||||
|  | 	db.RegisterConverter(ConverterRFCUnixMilliTimeToUnixMillis) | ||||||
|  | 	db.RegisterConverter(ConverterRFCUnixNanoTimeToUnixNanos) | ||||||
|  | 	db.RegisterConverter(ConverterRFCUnixTimeToUnixSeconds) | ||||||
|  | 	db.RegisterConverter(ConverterRFC339TimeToString) | ||||||
|  | 	db.RegisterConverter(ConverterRFC339NanoTimeToString) | ||||||
|  | 	db.RegisterConverter(ConverterJsonObjToString) | ||||||
|  | 	db.RegisterConverter(ConverterJsonArrToString) | ||||||
|  | 	db.RegisterConverter(ConverterExErrCategoryToString) | ||||||
|  | 	db.RegisterConverter(ConverterExErrSeverityToString) | ||||||
|  | 	db.RegisterConverter(ConverterExErrTypeToString) | ||||||
|  | } | ||||||
|   | |||||||
							
								
								
									
										40
									
								
								sq/hasher.go
									
									
									
									
									
								
							
							
						
						
									
										40
									
								
								sq/hasher.go
									
									
									
									
									
								
							| @@ -13,7 +13,9 @@ import ( | |||||||
| 	"strings" | 	"strings" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func HashSqliteSchema(ctx context.Context, schemaStr string) (string, error) { | // HashMattnSqliteSchema | ||||||
|  | // use if github.com/glebarez/go-sqlite | ||||||
|  | func HashMattnSqliteSchema(ctx context.Context, schemaStr string) (string, error) { | ||||||
| 	dbdir := os.TempDir() | 	dbdir := os.TempDir() | ||||||
| 	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3") | 	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3") | ||||||
|  |  | ||||||
| @@ -39,6 +41,34 @@ func HashSqliteSchema(ctx context.Context, schemaStr string) (string, error) { | |||||||
| 	return HashSqliteDatabase(ctx, db) | 	return HashSqliteDatabase(ctx, db) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // HashGoSqliteSchema | ||||||
|  | // use if mattn/go-sqlite3 | ||||||
|  | func HashGoSqliteSchema(ctx context.Context, schemaStr string) (string, error) { | ||||||
|  | 	dbdir := os.TempDir() | ||||||
|  | 	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3") | ||||||
|  |  | ||||||
|  | 	err := os.MkdirAll(dbdir, os.ModePerm) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	url := fmt.Sprintf("file:%s?_pragma=journal_mode(%s)&_pragma=timeout(%d)&_pragma=foreign_keys(%s)&_pragma=busy_timeout(%d)", dbfile1, "DELETE", 1000, "true", 1000) | ||||||
|  |  | ||||||
|  | 	xdb, err := sqlx.Open("sqlite", url) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	db := NewDB(xdb) | ||||||
|  |  | ||||||
|  | 	_, err = db.Exec(ctx, schemaStr, PP{}) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return HashSqliteDatabase(ctx, db) | ||||||
|  | } | ||||||
|  |  | ||||||
| func HashSqliteDatabase(ctx context.Context, db Queryable) (string, error) { | func HashSqliteDatabase(ctx context.Context, db Queryable) (string, error) { | ||||||
| 	ss, err := CreateSqliteDatabaseSchemaString(ctx, db) | 	ss, err := CreateSqliteDatabaseSchemaString(ctx, db) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -91,7 +121,7 @@ func CreateSqliteDatabaseSchemaString(ctx context.Context, db Queryable) (string | |||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return "", err | 		return "", err | ||||||
| 	} | 	} | ||||||
| 	tableList, err := ScanAll[tabInfo](rowsTableList, SModeFast, Unsafe, true) | 	tableList, err := ScanAll[tabInfo](ctx, db, rowsTableList, SModeFast, Unsafe, true) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return "", err | 		return "", err | ||||||
| 	} | 	} | ||||||
| @@ -113,7 +143,7 @@ func CreateSqliteDatabaseSchemaString(ctx context.Context, db Queryable) (string | |||||||
| 				return "", err | 				return "", err | ||||||
| 			} | 			} | ||||||
|  |  | ||||||
| 			columnList, err := ScanAll[colInfo](rowsColumnList, SModeFast, Unsafe, true) | 			columnList, err := ScanAll[colInfo](ctx, db, rowsColumnList, SModeFast, Unsafe, true) | ||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 				return "", err | 				return "", err | ||||||
| 			} | 			} | ||||||
| @@ -128,7 +158,7 @@ func CreateSqliteDatabaseSchemaString(ctx context.Context, db Queryable) (string | |||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 				return "", err | 				return "", err | ||||||
| 			} | 			} | ||||||
| 			idxList, err := ScanAll[idxInfo](rowsIdxList, SModeFast, Unsafe, true) | 			idxList, err := ScanAll[idxInfo](ctx, db, rowsIdxList, SModeFast, Unsafe, true) | ||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 				return "", err | 				return "", err | ||||||
| 			} | 			} | ||||||
| @@ -143,7 +173,7 @@ func CreateSqliteDatabaseSchemaString(ctx context.Context, db Queryable) (string | |||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 				return "", err | 				return "", err | ||||||
| 			} | 			} | ||||||
| 			fkyList, err := ScanAll[fkyInfo](rowsIdxList, SModeFast, Unsafe, true) | 			fkyList, err := ScanAll[fkyInfo](ctx, db, rowsIdxList, SModeFast, Unsafe, true) | ||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 				return "", err | 				return "", err | ||||||
| 			} | 			} | ||||||
|   | |||||||
							
								
								
									
										5
									
								
								sq/json.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										5
									
								
								sq/json.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,5 @@ | |||||||
package sq

// JsonObj is a generic, string-keyed JSON object; convenience alias for
// working with schemaless JSON values.
type JsonObj map[string]any

// JsonArr is a generic JSON array; counterpart to JsonObj.
type JsonArr []any
							
								
								
									
										126
									
								
								sq/paginate.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										126
									
								
								sq/paginate.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,126 @@ | |||||||
|  | package sq | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"fmt" | ||||||
|  | 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | ||||||
|  | ) | ||||||
|  |  | ||||||
// PaginateFilter supplies the WHERE/JOIN SQL fragments and the sort order
// used by Paginate and Count.
type PaginateFilter interface {
	// SQL returns the filter clause, the join clause, and the names of the
	// joined tables; any bound values are added to params.
	SQL(params PP) (filterClause string, joinClause string, joinTables []string)
	// Sort returns the ORDER BY terms, in priority order.
	Sort() []FilterSort
}

// FilterSort is a single ORDER BY term (column + direction).
type FilterSort struct {
	Field     string
	Direction ct.SortDirection
}
|  |  | ||||||
|  | func Paginate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||||
|  | 	prepParams := PP{} | ||||||
|  |  | ||||||
|  | 	sortOrder := filter.Sort() | ||||||
|  | 	sortCond := "" | ||||||
|  | 	if len(sortOrder) > 0 { | ||||||
|  | 		sortCond = "ORDER BY " | ||||||
|  | 		for i, v := range sortOrder { | ||||||
|  | 			if i > 0 { | ||||||
|  | 				sortCond += ", " | ||||||
|  | 			} | ||||||
|  | 			sortCond += v.Field + " " + string(v.Direction) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pageCond := "" | ||||||
|  | 	if limit != nil { | ||||||
|  | 		pageCond += fmt.Sprintf("LIMIT :%s OFFSET :%s", prepParams.Add(*limit+1), prepParams.Add(*limit*(page-1))) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	filterCond, joinCond, joinTables := filter.SQL(prepParams) | ||||||
|  |  | ||||||
|  | 	selectCond := table + ".*" | ||||||
|  | 	for _, v := range joinTables { | ||||||
|  | 		selectCond += ", " + v + ".*" | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	sqlQueryData := "SELECT " + selectCond + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " ) " + sortCond + " " + pageCond | ||||||
|  | 	sqlQueryCount := "SELECT " + "COUNT(*)" + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " ) " | ||||||
|  |  | ||||||
|  | 	rows, err := q.Query(ctx, sqlQueryData, prepParams) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, pag.Pagination{}, exerr.Wrap(err, "failed to list paginated entries from DB").Str("table", table).Any("filter", filter).Int("page", page).Any("limit", limit).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	entities, err := ScanAll[TData](ctx, q, rows, scanMode, scanSec, true) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode paginated entries from DB").Str("table", table).Int("page", page).Any("limit", limit).Str("scanMode", string(scanMode)).Str("scanSec", string(scanSec)).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if page == 1 && (limit == nil || len(entities) <= *limit) { | ||||||
|  | 		return entities, pag.Pagination{ | ||||||
|  | 			Page:             1, | ||||||
|  | 			Limit:            langext.Coalesce(limit, len(entities)), | ||||||
|  | 			TotalPages:       1, | ||||||
|  | 			TotalItems:       len(entities), | ||||||
|  | 			CurrentPageCount: 1, | ||||||
|  | 		}, nil | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		countRows, err := q.Query(ctx, sqlQueryCount, prepParams) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, pag.Pagination{}, exerr.Wrap(err, "failed to query total-count of paginated entries from DB").Str("table", table).Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if !countRows.Next() { | ||||||
|  | 			return nil, pag.Pagination{}, exerr.New(exerr.TypeSQLDecode, "SQL COUNT(*) query returned no rows").Str("table", table).Any("filter", filter).Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		var countRes int | ||||||
|  | 		err = countRows.Scan(&countRes) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode total-count of paginated entries from DB").Str("table", table).Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if len(entities) > *limit { | ||||||
|  | 			entities = entities[:*limit] | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		paginationObj := pag.Pagination{ | ||||||
|  | 			Page:             page, | ||||||
|  | 			Limit:            langext.Coalesce(limit, countRes), | ||||||
|  | 			TotalPages:       pag.CalcPaginationTotalPages(countRes, langext.Coalesce(limit, countRes)), | ||||||
|  | 			TotalItems:       countRes, | ||||||
|  | 			CurrentPageCount: len(entities), | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		return entities, paginationObj, nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func Count(ctx context.Context, q Queryable, table string, filter PaginateFilter) (int, error) { | ||||||
|  | 	prepParams := PP{} | ||||||
|  |  | ||||||
|  | 	filterCond, joinCond, _ := filter.SQL(prepParams) | ||||||
|  |  | ||||||
|  | 	sqlQueryCount := "SELECT " + "COUNT(*)" + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " )" | ||||||
|  |  | ||||||
|  | 	countRows, err := q.Query(ctx, sqlQueryCount, prepParams) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return 0, exerr.Wrap(err, "failed to query count of entries from DB").Str("table", table).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if !countRows.Next() { | ||||||
|  | 		return 0, exerr.New(exerr.TypeSQLDecode, "SQL COUNT(*) query returned no rows").Str("table", table).Any("filter", filter).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	var countRes int | ||||||
|  | 	err = countRows.Scan(&countRes) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return 0, exerr.Wrap(err, "failed to decode count of entries from DB").Str("table", table).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return countRes, nil | ||||||
|  | } | ||||||
							
								
								
									
										12
									
								
								sq/params.go
									
									
									
									
									
								
							
							
						
						
									
										12
									
								
								sq/params.go
									
									
									
									
									
								
							| @@ -1,5 +1,7 @@ | |||||||
| package sq | package sq | ||||||
|  |  | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  |  | ||||||
| type PP map[string]any | type PP map[string]any | ||||||
|  |  | ||||||
| func Join(pps ...PP) PP { | func Join(pps ...PP) PP { | ||||||
| @@ -11,3 +13,13 @@ func Join(pps ...PP) PP { | |||||||
| 	} | 	} | ||||||
| 	return r | 	return r | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (pp *PP) Add(v any) string { | ||||||
|  | 	id := PPID() | ||||||
|  | 	(*pp)[id] = v | ||||||
|  | 	return id | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func PPID() string { | ||||||
|  | 	return "p_" + langext.RandBase62(8) | ||||||
|  | } | ||||||
|   | |||||||
| @@ -9,4 +9,5 @@ import ( | |||||||
// Queryable is the common query surface implemented by both the database
// handle and transactions.
type Queryable interface {
	Exec(ctx context.Context, sql string, prep PP) (sql.Result, error)
	Query(ctx context.Context, sql string, prep PP) (*sqlx.Rows, error)
	// ListConverter returns the registered DB <-> model type converters.
	ListConverter() []DBTypeConverter
}
|   | |||||||
| @@ -13,8 +13,8 @@ import ( | |||||||
| type StructScanMode string | type StructScanMode string | ||||||
|  |  | ||||||
| const ( | const ( | ||||||
| 	SModeFast     StructScanMode = "FAST" | 	SModeFast     StructScanMode = "FAST"     // Use default sq.Scan, does not work with joined/resolved types and/or custom value converter | ||||||
| 	SModeExtended StructScanMode = "EXTENDED" | 	SModeExtended StructScanMode = "EXTENDED" // Fully featured perhaps (?) a tiny bit slower - default | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type StructScanSafety string | type StructScanSafety string | ||||||
| @@ -51,7 +51,13 @@ func InsertSingle[TData any](ctx context.Context, q Queryable, tableName string, | |||||||
|  |  | ||||||
| 		columns = append(columns, "\""+columnName+"\"") | 		columns = append(columns, "\""+columnName+"\"") | ||||||
| 		params = append(params, ":"+paramkey) | 		params = append(params, ":"+paramkey) | ||||||
| 		pp[paramkey] = rvfield.Interface() |  | ||||||
|  | 		val, err := convertValueToDB(q, rvfield.Interface()) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, err | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		pp[paramkey] = val | ||||||
|  |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -71,7 +77,7 @@ func QuerySingle[TData any](ctx context.Context, q Queryable, sql string, pp PP, | |||||||
| 		return *new(TData), err | 		return *new(TData), err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	data, err := ScanSingle[TData](rows, mode, sec, true) | 	data, err := ScanSingle[TData](ctx, q, rows, mode, sec, true) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return *new(TData), err | 		return *new(TData), err | ||||||
| 	} | 	} | ||||||
| @@ -85,7 +91,7 @@ func QueryAll[TData any](ctx context.Context, q Queryable, sql string, pp PP, mo | |||||||
| 		return nil, err | 		return nil, err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	data, err := ScanAll[TData](rows, mode, sec, true) | 	data, err := ScanAll[TData](ctx, q, rows, mode, sec, true) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, err | ||||||
| 	} | 	} | ||||||
| @@ -93,7 +99,7 @@ func QueryAll[TData any](ctx context.Context, q Queryable, sql string, pp PP, mo | |||||||
| 	return data, nil | 	return data, nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func ScanSingle[TData any](rows *sqlx.Rows, mode StructScanMode, sec StructScanSafety, close bool) (TData, error) { | func ScanSingle[TData any](ctx context.Context, q Queryable, rows *sqlx.Rows, mode StructScanMode, sec StructScanSafety, close bool) (TData, error) { | ||||||
| 	if rows.Next() { | 	if rows.Next() { | ||||||
| 		var strscan *StructScanner | 		var strscan *StructScanner | ||||||
|  |  | ||||||
| @@ -123,7 +129,7 @@ func ScanSingle[TData any](rows *sqlx.Rows, mode StructScanMode, sec StructScanS | |||||||
| 				return *new(TData), err | 				return *new(TData), err | ||||||
| 			} | 			} | ||||||
| 		} else if mode == SModeExtended { | 		} else if mode == SModeExtended { | ||||||
| 			err := strscan.StructScanExt(&data) | 			err := strscan.StructScanExt(q, &data) | ||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 				return *new(TData), err | 				return *new(TData), err | ||||||
| 			} | 			} | ||||||
| @@ -149,6 +155,10 @@ func ScanSingle[TData any](rows *sqlx.Rows, mode StructScanMode, sec StructScanS | |||||||
| 			return *new(TData), err | 			return *new(TData), err | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|  | 		if err := ctx.Err(); err != nil { | ||||||
|  | 			return *new(TData), err | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		return data, nil | 		return data, nil | ||||||
|  |  | ||||||
| 	} else { | 	} else { | ||||||
| @@ -159,7 +169,7 @@ func ScanSingle[TData any](rows *sqlx.Rows, mode StructScanMode, sec StructScanS | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
| func ScanAll[TData any](rows *sqlx.Rows, mode StructScanMode, sec StructScanSafety, close bool) ([]TData, error) { | func ScanAll[TData any](ctx context.Context, q Queryable, rows *sqlx.Rows, mode StructScanMode, sec StructScanSafety, close bool) ([]TData, error) { | ||||||
| 	var strscan *StructScanner | 	var strscan *StructScanner | ||||||
|  |  | ||||||
| 	if sec == Safe { | 	if sec == Safe { | ||||||
| @@ -182,6 +192,11 @@ func ScanAll[TData any](rows *sqlx.Rows, mode StructScanMode, sec StructScanSafe | |||||||
|  |  | ||||||
| 	res := make([]TData, 0) | 	res := make([]TData, 0) | ||||||
| 	for rows.Next() { | 	for rows.Next() { | ||||||
|  |  | ||||||
|  | 		if err := ctx.Err(); err != nil { | ||||||
|  | 			return nil, err | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		if mode == SModeFast { | 		if mode == SModeFast { | ||||||
| 			var data TData | 			var data TData | ||||||
| 			err := strscan.StructScanBase(&data) | 			err := strscan.StructScanBase(&data) | ||||||
| @@ -191,7 +206,7 @@ func ScanAll[TData any](rows *sqlx.Rows, mode StructScanMode, sec StructScanSafe | |||||||
| 			res = append(res, data) | 			res = append(res, data) | ||||||
| 		} else if mode == SModeExtended { | 		} else if mode == SModeExtended { | ||||||
| 			var data TData | 			var data TData | ||||||
| 			err := strscan.StructScanExt(&data) | 			err := strscan.StructScanExt(q, &data) | ||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 				return nil, err | 				return nil, err | ||||||
| 			} | 			} | ||||||
|   | |||||||
							
								
								
									
										154
									
								
								sq/sq_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										154
									
								
								sq/sq_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,154 @@ | |||||||
|  | package sq | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"database/sql" | ||||||
|  | 	"fmt" | ||||||
|  | 	"github.com/glebarez/go-sqlite" | ||||||
|  | 	"github.com/jmoiron/sqlx" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"os" | ||||||
|  | 	"path/filepath" | ||||||
|  | 	"testing" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
// TestTypeConverter1 checks that InsertSingle can write a struct containing
// a plain time.Time into a STRICT INTEGER column via the DB's default
// converters (insert only; no read-back verification).
func TestTypeConverter1(t *testing.T) {
	type RequestData struct {
		ID        string    `db:"id"`
		Timestamp time.Time `db:"timestamp"`
	}

	// register the pure-Go sqlite driver under "sqlite3" exactly once
	if !langext.InArray("sqlite3", sql.Drivers()) {
		sqlite.RegisterAsSQLITE3()
	}

	ctx := context.Background()

	// fresh on-disk database in the per-test temp dir
	dbdir := t.TempDir()
	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3")

	tst.AssertNoErr(t, os.MkdirAll(dbdir, os.ModePerm))

	url := fmt.Sprintf("file:%s?_pragma=journal_mode(%s)&_pragma=timeout(%d)&_pragma=foreign_keys(%s)&_pragma=busy_timeout(%d)", dbfile1, "DELETE", 1000, "true", 1000)

	xdb := tst.Must(sqlx.Open("sqlite", url))(t)

	db := NewDB(xdb)
	db.RegisterDefaultConverter() // enables time.Time <-> INTEGER conversion

	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{})
	tst.AssertNoErr(t, err)

	_, err = InsertSingle(ctx, db, "requests", RequestData{
		ID:        "001",
		Timestamp: time.Date(2000, 06, 15, 12, 0, 0, 0, time.UTC),
	})
	tst.AssertNoErr(t, err)
}
|  |  | ||||||
// TestTypeConverter2 round-trips an rfctime.UnixMilliTime through a STRICT
// INTEGER column: insert via default converters, read back with
// QuerySingle in SModeExtended, and compare timestamps to the nanosecond.
func TestTypeConverter2(t *testing.T) {

	// register the pure-Go sqlite driver under "sqlite3" exactly once
	if !langext.InArray("sqlite3", sql.Drivers()) {
		sqlite.RegisterAsSQLITE3()
	}

	type RequestData struct {
		ID        string                `db:"id"`
		Timestamp rfctime.UnixMilliTime `db:"timestamp"`
	}

	ctx := context.Background()

	// fresh on-disk database in the per-test temp dir
	dbdir := t.TempDir()
	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3")

	tst.AssertNoErr(t, os.MkdirAll(dbdir, os.ModePerm))

	url := fmt.Sprintf("file:%s?_pragma=journal_mode(%s)&_pragma=timeout(%d)&_pragma=foreign_keys(%s)&_pragma=busy_timeout(%d)", dbfile1, "DELETE", 1000, "true", 1000)

	xdb := tst.Must(sqlx.Open("sqlite", url))(t)

	db := NewDB(xdb)
	db.RegisterDefaultConverter()

	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{})
	tst.AssertNoErr(t, err)

	t0 := rfctime.NewUnixMilli(time.Date(2012, 03, 01, 16, 0, 0, 0, time.UTC))

	_, err = InsertSingle(ctx, db, "requests", RequestData{
		ID:        "002",
		Timestamp: t0,
	})
	tst.AssertNoErr(t, err)

	// read back; SModeExtended is required for custom value converters
	r, err := QuerySingle[RequestData](ctx, db, "SELECT * FROM requests WHERE id = '002'", PP{}, SModeExtended, Safe)
	tst.AssertNoErr(t, err)

	fmt.Printf("%+v\n", r)

	tst.AssertEqual(t, "002", r.ID)
	tst.AssertEqual(t, t0.UnixNano(), r.Timestamp.UnixNano())
}
|  |  | ||||||
// TestTypeConverter3 exercises the pointer variant (*rfctime.UnixMilliTime)
// against a nullable INTEGER column: one row with a value, one with NULL,
// and verifies both decode correctly (value round-trip and nil pointer).
func TestTypeConverter3(t *testing.T) {

	// register the pure-Go sqlite driver under "sqlite3" exactly once
	if !langext.InArray("sqlite3", sql.Drivers()) {
		sqlite.RegisterAsSQLITE3()
	}

	type RequestData struct {
		ID        string                 `db:"id"`
		Timestamp *rfctime.UnixMilliTime `db:"timestamp"`
	}

	ctx := context.Background()

	// fresh on-disk database in the per-test temp dir
	dbdir := t.TempDir()
	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3")

	tst.AssertNoErr(t, os.MkdirAll(dbdir, os.ModePerm))

	url := fmt.Sprintf("file:%s?_pragma=journal_mode(%s)&_pragma=timeout(%d)&_pragma=foreign_keys(%s)&_pragma=busy_timeout(%d)", dbfile1, "DELETE", 1000, "true", 1000)

	xdb := tst.Must(sqlx.Open("sqlite", url))(t)

	db := NewDB(xdb)
	db.RegisterDefaultConverter()

	// note: timestamp column is NULLable here (unlike the other tests)
	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NULL, PRIMARY KEY (id) ) STRICT", PP{})
	tst.AssertNoErr(t, err)

	t0 := rfctime.NewUnixMilli(time.Date(2012, 03, 01, 16, 0, 0, 0, time.UTC))

	_, err = InsertSingle(ctx, db, "requests", RequestData{
		ID:        "001",
		Timestamp: &t0,
	})
	tst.AssertNoErr(t, err)

	_, err = InsertSingle(ctx, db, "requests", RequestData{
		ID:        "002",
		Timestamp: nil,
	})
	tst.AssertNoErr(t, err)

	{
		// non-NULL row decodes back to the original timestamp
		r1, err := QuerySingle[RequestData](ctx, db, "SELECT * FROM requests WHERE id = '001'", PP{}, SModeExtended, Safe)
		tst.AssertNoErr(t, err)
		fmt.Printf("%+v\n", r1)
		tst.AssertEqual(t, "001", r1.ID)
		tst.AssertEqual(t, t0.UnixNano(), r1.Timestamp.UnixNano())
	}

	{
		// NULL row decodes to a nil pointer
		r2, err := QuerySingle[RequestData](ctx, db, "SELECT * FROM requests WHERE id = '002'", PP{}, SModeExtended, Safe)
		tst.AssertNoErr(t, err)
		fmt.Printf("%+v\n", r2)
		tst.AssertEqual(t, "002", r2.ID)
		tst.AssertEqual(t, nil, r2.Timestamp)
	}
}
| @@ -5,7 +5,9 @@ import ( | |||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"github.com/jmoiron/sqlx" | 	"github.com/jmoiron/sqlx" | ||||||
| 	"github.com/jmoiron/sqlx/reflectx" | 	"github.com/jmoiron/sqlx/reflectx" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"reflect" | 	"reflect" | ||||||
|  | 	"strings" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| // forked from sqlx, but added ability to unmarshal optional-nested structs | // forked from sqlx, but added ability to unmarshal optional-nested structs | ||||||
| @@ -17,6 +19,7 @@ type StructScanner struct { | |||||||
|  |  | ||||||
| 	fields    [][]int | 	fields    [][]int | ||||||
| 	values    []any | 	values    []any | ||||||
|  | 	converter []ssConverter | ||||||
| 	columns   []string | 	columns   []string | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -28,6 +31,11 @@ func NewStructScanner(rows *sqlx.Rows, unsafe bool) *StructScanner { | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type ssConverter struct { | ||||||
|  | 	Converter DBTypeConverter | ||||||
|  | 	RefCount  int | ||||||
|  | } | ||||||
|  |  | ||||||
| func (r *StructScanner) Start(dest any) error { | func (r *StructScanner) Start(dest any) error { | ||||||
| 	v := reflect.ValueOf(dest) | 	v := reflect.ValueOf(dest) | ||||||
|  |  | ||||||
| @@ -47,13 +55,15 @@ func (r *StructScanner) Start(dest any) error { | |||||||
| 		return fmt.Errorf("missing destination name %s in %T", columns[f], dest) | 		return fmt.Errorf("missing destination name %s in %T", columns[f], dest) | ||||||
| 	} | 	} | ||||||
| 	r.values = make([]interface{}, len(columns)) | 	r.values = make([]interface{}, len(columns)) | ||||||
|  | 	r.converter = make([]ssConverter, len(columns)) | ||||||
|  |  | ||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
| // StructScanExt forked from github.com/jmoiron/sqlx@v1.3.5/sqlx.go | // StructScanExt forked from github.com/jmoiron/sqlx@v1.3.5/sqlx.go | ||||||
| // does also wok with nullabel structs (from LEFT JOIN's) | // does also work with nullabel structs (from LEFT JOIN's) | ||||||
| func (r *StructScanner) StructScanExt(dest any) error { | // does also work with custom value converters | ||||||
|  | func (r *StructScanner) StructScanExt(q Queryable, dest any) error { | ||||||
| 	v := reflect.ValueOf(dest) | 	v := reflect.ValueOf(dest) | ||||||
|  |  | ||||||
| 	if v.Kind() != reflect.Ptr { | 	if v.Kind() != reflect.Ptr { | ||||||
| @@ -64,7 +74,7 @@ func (r *StructScanner) StructScanExt(dest any) error { | |||||||
|  |  | ||||||
| 	v = v.Elem() | 	v = v.Elem() | ||||||
|  |  | ||||||
| 	err := fieldsByTraversalExtended(v, r.fields, r.values) | 	err := fieldsByTraversalExtended(q, v, r.fields, r.values, r.converter) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
| @@ -131,7 +141,6 @@ func (r *StructScanner) StructScanExt(dest any) error { | |||||||
|  |  | ||||||
| 		val1 := reflect.ValueOf(r.values[i]) | 		val1 := reflect.ValueOf(r.values[i]) | ||||||
| 		val2 := val1.Elem() | 		val2 := val1.Elem() | ||||||
| 		val3 := val2.Elem() |  | ||||||
|  |  | ||||||
| 		if val2.IsNil() { | 		if val2.IsNil() { | ||||||
| 			if f.Kind() != reflect.Pointer { | 			if f.Kind() != reflect.Pointer { | ||||||
| @@ -140,7 +149,22 @@ func (r *StructScanner) StructScanExt(dest any) error { | |||||||
|  |  | ||||||
| 			f.Set(reflect.Zero(f.Type())) // set to nil | 			f.Set(reflect.Zero(f.Type())) // set to nil | ||||||
| 		} else { | 		} else { | ||||||
| 			f.Set(val3) | 			if r.converter[i].Converter != nil { | ||||||
|  | 				val3 := val2.Elem() | ||||||
|  | 				conv3, err := r.converter[i].Converter.DBToModel(val3.Interface()) | ||||||
|  | 				if err != nil { | ||||||
|  | 					return err | ||||||
|  | 				} | ||||||
|  | 				conv3RVal := reflect.ValueOf(conv3) | ||||||
|  | 				for j := 0; j < r.converter[i].RefCount; j++ { | ||||||
|  | 					newConv3Val := reflect.New(conv3RVal.Type()) | ||||||
|  | 					newConv3Val.Elem().Set(conv3RVal) | ||||||
|  | 					conv3RVal = newConv3Val | ||||||
|  | 				} | ||||||
|  | 				f.Set(conv3RVal) | ||||||
|  | 			} else { | ||||||
|  | 				f.Set(val2.Elem()) | ||||||
|  | 			} | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 	} | 	} | ||||||
| @@ -172,7 +196,7 @@ func (r *StructScanner) StructScanBase(dest any) error { | |||||||
| } | } | ||||||
|  |  | ||||||
| // fieldsByTraversal forked from github.com/jmoiron/sqlx@v1.3.5/sqlx.go | // fieldsByTraversal forked from github.com/jmoiron/sqlx@v1.3.5/sqlx.go | ||||||
| func fieldsByTraversalExtended(v reflect.Value, traversals [][]int, values []interface{}) error { | func fieldsByTraversalExtended(q Queryable, v reflect.Value, traversals [][]int, values []interface{}, converter []ssConverter) error { | ||||||
| 	v = reflect.Indirect(v) | 	v = reflect.Indirect(v) | ||||||
| 	if v.Kind() != reflect.Struct { | 	if v.Kind() != reflect.Struct { | ||||||
| 		return errors.New("argument not a struct") | 		return errors.New("argument not a struct") | ||||||
| @@ -185,7 +209,35 @@ func fieldsByTraversalExtended(v reflect.Value, traversals [][]int, values []int | |||||||
| 		} | 		} | ||||||
| 		f := reflectx.FieldByIndexes(v, traversal) | 		f := reflectx.FieldByIndexes(v, traversal) | ||||||
|  |  | ||||||
|  | 		typeStr := f.Type().String() | ||||||
|  |  | ||||||
|  | 		foundConverter := false | ||||||
|  | 		for _, conv := range q.ListConverter() { | ||||||
|  | 			if conv.ModelTypeString() == typeStr { | ||||||
|  | 				_v := langext.Ptr[any](nil) | ||||||
|  | 				values[i] = _v | ||||||
|  | 				foundConverter = true | ||||||
|  | 				converter[i] = ssConverter{Converter: conv, RefCount: 0} | ||||||
|  | 				break | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 		if !foundConverter { | ||||||
|  | 			// also allow non-pointer converter for pointer-types | ||||||
|  | 			for _, conv := range q.ListConverter() { | ||||||
|  | 				if conv.ModelTypeString() == strings.TrimLeft(typeStr, "*") { | ||||||
|  | 					_v := langext.Ptr[any](nil) | ||||||
|  | 					values[i] = _v | ||||||
|  | 					foundConverter = true | ||||||
|  | 					converter[i] = ssConverter{Converter: conv, RefCount: len(typeStr) - len(strings.TrimLeft(typeStr, "*"))} // kind hacky way to get the amount of ptr before <f>, but it works... | ||||||
|  | 					break | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if !foundConverter { | ||||||
| 			values[i] = reflect.New(reflect.PointerTo(f.Type())).Interface() | 			values[i] = reflect.New(reflect.PointerTo(f.Type())).Interface() | ||||||
|  | 			converter[i] = ssConverter{Converter: nil, RefCount: -1} | ||||||
|  | 		} | ||||||
| 	} | 	} | ||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|   | |||||||
| @@ -4,6 +4,7 @@ import ( | |||||||
| 	"context" | 	"context" | ||||||
| 	"database/sql" | 	"database/sql" | ||||||
| 	"github.com/jmoiron/sqlx" | 	"github.com/jmoiron/sqlx" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -17,38 +18,38 @@ const ( | |||||||
| ) | ) | ||||||
|  |  | ||||||
// Tx is a single database transaction. It can run statements via the
// embedded Queryable and must eventually be finished with Commit or
// Rollback; Status reports its current lifecycle state.
type Tx interface {
	Queryable

	Rollback() error
	Commit() error
	Status() TxStatus
}
|  |  | ||||||
// transaction is the default Tx implementation, wrapping an sqlx.Tx.
type transaction struct {
	tx       *sqlx.Tx  // underlying sqlx transaction
	id       uint16    // transaction id, reported to the db's listeners
	status   TxStatus  // lifecycle state, set by Commit/Rollback
	execCtr  int       // exec-call counter (starts at 0; presumably incremented per Exec — confirm in Exec)
	queryCtr int       // query-call counter (starts at 0; presumably incremented per Query — confirm in Query)
	db       *database // owning database; supplies the listeners (db.lstr)
}
|  |  | ||||||
| func NewTransaction(xtx *sqlx.Tx, txid uint16, lstr []Listener) Tx { | func NewTransaction(xtx *sqlx.Tx, txid uint16, db *database) Tx { | ||||||
| 	return &transaction{ | 	return &transaction{ | ||||||
| 		tx:       xtx, | 		tx:       xtx, | ||||||
| 		id:       txid, | 		id:       txid, | ||||||
| 		lstr:     lstr, |  | ||||||
| 		status:   TxStatusInitial, | 		status:   TxStatusInitial, | ||||||
| 		execCtr:  0, | 		execCtr:  0, | ||||||
| 		queryCtr: 0, | 		queryCtr: 0, | ||||||
|  | 		db:       db, | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
| func (tx *transaction) Rollback() error { | func (tx *transaction) Rollback() error { | ||||||
| 	for _, v := range tx.lstr { | 	for _, v := range tx.db.lstr { | ||||||
| 		err := v.PreTxRollback(tx.id) | 		err := v.PreTxRollback(tx.id) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return err | 			return exerr.Wrap(err, "failed to call SQL pre-rollback listener").Int("tx.id", int(tx.id)).Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -58,7 +59,7 @@ func (tx *transaction) Rollback() error { | |||||||
| 		tx.status = TxStatusRollback | 		tx.status = TxStatusRollback | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	for _, v := range tx.lstr { | 	for _, v := range tx.db.lstr { | ||||||
| 		v.PostTxRollback(tx.id, result) | 		v.PostTxRollback(tx.id, result) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -66,10 +67,10 @@ func (tx *transaction) Rollback() error { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (tx *transaction) Commit() error { | func (tx *transaction) Commit() error { | ||||||
| 	for _, v := range tx.lstr { | 	for _, v := range tx.db.lstr { | ||||||
| 		err := v.PreTxCommit(tx.id) | 		err := v.PreTxCommit(tx.id) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return err | 			return exerr.Wrap(err, "failed to call SQL pre-commit listener").Int("tx.id", int(tx.id)).Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -79,7 +80,7 @@ func (tx *transaction) Commit() error { | |||||||
| 		tx.status = TxStatusComitted | 		tx.status = TxStatusComitted | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	for _, v := range tx.lstr { | 	for _, v := range tx.db.lstr { | ||||||
| 		v.PostTxRollback(tx.id, result) | 		v.PostTxRollback(tx.id, result) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -88,10 +89,10 @@ func (tx *transaction) Commit() error { | |||||||
|  |  | ||||||
| func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Result, error) { | func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Result, error) { | ||||||
| 	origsql := sqlstr | 	origsql := sqlstr | ||||||
| 	for _, v := range tx.lstr { | 	for _, v := range tx.db.lstr { | ||||||
| 		err := v.PreExec(ctx, langext.Ptr(tx.id), &sqlstr, &prep) | 		err := v.PreExec(ctx, langext.Ptr(tx.id), &sqlstr, &prep) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, err | 			return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -101,22 +102,22 @@ func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Re | |||||||
| 		tx.status = TxStatusActive | 		tx.status = TxStatusActive | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	for _, v := range tx.lstr { | 	for _, v := range tx.db.lstr { | ||||||
| 		v.PostExec(langext.Ptr(tx.id), origsql, sqlstr, prep) | 		v.PostExec(langext.Ptr(tx.id), origsql, sqlstr, prep) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, exerr.Wrap(err, "Failed to [exec] sql statement").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 	} | 	} | ||||||
| 	return res, nil | 	return res, nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Rows, error) { | func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Rows, error) { | ||||||
| 	origsql := sqlstr | 	origsql := sqlstr | ||||||
| 	for _, v := range tx.lstr { | 	for _, v := range tx.db.lstr { | ||||||
| 		err := v.PreQuery(ctx, langext.Ptr(tx.id), &sqlstr, &prep) | 		err := v.PreQuery(ctx, langext.Ptr(tx.id), &sqlstr, &prep) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, err | 			return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -126,12 +127,12 @@ func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx | |||||||
| 		tx.status = TxStatusActive | 		tx.status = TxStatusActive | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	for _, v := range tx.lstr { | 	for _, v := range tx.db.lstr { | ||||||
| 		v.PostQuery(langext.Ptr(tx.id), origsql, sqlstr, prep) | 		v.PostQuery(langext.Ptr(tx.id), origsql, sqlstr, prep) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, exerr.Wrap(err, "Failed to [query] sql statement").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 	} | 	} | ||||||
| 	return rows, nil | 	return rows, nil | ||||||
| } | } | ||||||
| @@ -140,6 +141,10 @@ func (tx *transaction) Status() TxStatus { | |||||||
| 	return tx.status | 	return tx.status | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (tx *transaction) ListConverter() []DBTypeConverter { | ||||||
|  | 	return tx.db.conv | ||||||
|  | } | ||||||
|  |  | ||||||
| func (tx *transaction) Traffic() (int, int) { | func (tx *transaction) Traffic() (int, int) { | ||||||
| 	return tx.execCtr, tx.queryCtr | 	return tx.execCtr, tx.queryCtr | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,33 +1,28 @@ | |||||||
| package syncext | package syncext | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"context" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" |  | ||||||
| 	"sync" | 	"sync" | ||||||
| 	"time" |  | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type AtomicBool struct { | type Atomic[T any] struct { | ||||||
| 	v        bool | 	v    T | ||||||
| 	listener map[string]chan bool | 	lock sync.RWMutex | ||||||
| 	lock     sync.Mutex |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func NewAtomicBool(value bool) *AtomicBool { | func NewAtomic[T any](value T) *Atomic[T] { | ||||||
| 	return &AtomicBool{ | 	return &Atomic[T]{ | ||||||
| 		v:    value, | 		v:    value, | ||||||
| 		listener: make(map[string]chan bool), | 		lock: sync.RWMutex{}, | ||||||
| 		lock:     sync.Mutex{}, |  | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
| func (a *AtomicBool) Get() bool { | func (a *Atomic[T]) Get() T { | ||||||
| 	a.lock.Lock() | 	a.lock.RLock() | ||||||
| 	defer a.lock.Unlock() | 	defer a.lock.RUnlock() | ||||||
| 	return a.v | 	return a.v | ||||||
| } | } | ||||||
|  |  | ||||||
| func (a *AtomicBool) Set(value bool) bool { | func (a *Atomic[T]) Set(value T) T { | ||||||
| 	a.lock.Lock() | 	a.lock.Lock() | ||||||
| 	defer a.lock.Unlock() | 	defer a.lock.Unlock() | ||||||
|  |  | ||||||
| @@ -35,79 +30,5 @@ func (a *AtomicBool) Set(value bool) bool { | |||||||
|  |  | ||||||
| 	a.v = value | 	a.v = value | ||||||
|  |  | ||||||
| 	for k, v := range a.listener { |  | ||||||
| 		select { |  | ||||||
| 		case v <- value: |  | ||||||
| 			// message sent |  | ||||||
| 		default: |  | ||||||
| 			// no receiver on channel |  | ||||||
| 			delete(a.listener, k) |  | ||||||
| 		} |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return oldValue | 	return oldValue | ||||||
| } | } | ||||||
|  |  | ||||||
| func (a *AtomicBool) Wait(waitFor bool) { |  | ||||||
| 	_ = a.WaitWithContext(context.Background(), waitFor) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (a *AtomicBool) WaitWithTimeout(timeout time.Duration, waitFor bool) error { |  | ||||||
| 	ctx, cancel := context.WithTimeout(context.Background(), timeout) |  | ||||||
| 	defer cancel() |  | ||||||
| 	return a.WaitWithContext(ctx, waitFor) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (a *AtomicBool) WaitWithContext(ctx context.Context, waitFor bool) error { |  | ||||||
| 	if err := ctx.Err(); err != nil { |  | ||||||
| 		return err |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	if a.Get() == waitFor { |  | ||||||
| 		return nil |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	uuid, _ := langext.NewHexUUID() |  | ||||||
|  |  | ||||||
| 	waitchan := make(chan bool) |  | ||||||
|  |  | ||||||
| 	a.lock.Lock() |  | ||||||
| 	a.listener[uuid] = waitchan |  | ||||||
| 	a.lock.Unlock() |  | ||||||
| 	defer func() { |  | ||||||
| 		a.lock.Lock() |  | ||||||
| 		delete(a.listener, uuid) |  | ||||||
| 		a.lock.Unlock() |  | ||||||
| 	}() |  | ||||||
|  |  | ||||||
| 	for { |  | ||||||
| 		if err := ctx.Err(); err != nil { |  | ||||||
| 			return err |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		timeOut := 1024 * time.Millisecond |  | ||||||
|  |  | ||||||
| 		if dl, ok := ctx.Deadline(); ok { |  | ||||||
| 			timeOutMax := dl.Sub(time.Now()) |  | ||||||
| 			if timeOutMax <= 0 { |  | ||||||
| 				timeOut = 0 |  | ||||||
| 			} else if 0 < timeOutMax && timeOutMax < timeOut { |  | ||||||
| 				timeOut = timeOutMax |  | ||||||
| 			} |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		if v, ok := ReadChannelWithTimeout(waitchan, timeOut); ok { |  | ||||||
| 			if v == waitFor { |  | ||||||
| 				return nil |  | ||||||
| 			} |  | ||||||
| 		} else { |  | ||||||
| 			if err := ctx.Err(); err != nil { |  | ||||||
| 				return err |  | ||||||
| 			} |  | ||||||
|  |  | ||||||
| 			if a.Get() == waitFor { |  | ||||||
| 				return nil |  | ||||||
| 			} |  | ||||||
| 		} |  | ||||||
| 	} |  | ||||||
| } |  | ||||||
|   | |||||||
							
								
								
									
										113
									
								
								syncext/bool.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										113
									
								
								syncext/bool.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,113 @@ | |||||||
|  | package syncext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"sync" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type AtomicBool struct { | ||||||
|  | 	v        bool | ||||||
|  | 	listener map[string]chan bool | ||||||
|  | 	lock     sync.Mutex | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewAtomicBool(value bool) *AtomicBool { | ||||||
|  | 	return &AtomicBool{ | ||||||
|  | 		v:        value, | ||||||
|  | 		listener: make(map[string]chan bool), | ||||||
|  | 		lock:     sync.Mutex{}, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (a *AtomicBool) Get() bool { | ||||||
|  | 	a.lock.Lock() | ||||||
|  | 	defer a.lock.Unlock() | ||||||
|  | 	return a.v | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (a *AtomicBool) Set(value bool) bool { | ||||||
|  | 	a.lock.Lock() | ||||||
|  | 	defer a.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	oldValue := a.v | ||||||
|  |  | ||||||
|  | 	a.v = value | ||||||
|  |  | ||||||
|  | 	for k, v := range a.listener { | ||||||
|  | 		select { | ||||||
|  | 		case v <- value: | ||||||
|  | 			// message sent | ||||||
|  | 		default: | ||||||
|  | 			// no receiver on channel | ||||||
|  | 			delete(a.listener, k) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return oldValue | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (a *AtomicBool) Wait(waitFor bool) { | ||||||
|  | 	_ = a.WaitWithContext(context.Background(), waitFor) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (a *AtomicBool) WaitWithTimeout(timeout time.Duration, waitFor bool) error { | ||||||
|  | 	ctx, cancel := context.WithTimeout(context.Background(), timeout) | ||||||
|  | 	defer cancel() | ||||||
|  | 	return a.WaitWithContext(ctx, waitFor) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (a *AtomicBool) WaitWithContext(ctx context.Context, waitFor bool) error { | ||||||
|  | 	if err := ctx.Err(); err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if a.Get() == waitFor { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	uuid, _ := langext.NewHexUUID() | ||||||
|  |  | ||||||
|  | 	waitchan := make(chan bool) | ||||||
|  |  | ||||||
|  | 	a.lock.Lock() | ||||||
|  | 	a.listener[uuid] = waitchan | ||||||
|  | 	a.lock.Unlock() | ||||||
|  | 	defer func() { | ||||||
|  | 		a.lock.Lock() | ||||||
|  | 		delete(a.listener, uuid) | ||||||
|  | 		a.lock.Unlock() | ||||||
|  | 	}() | ||||||
|  |  | ||||||
|  | 	for { | ||||||
|  | 		if err := ctx.Err(); err != nil { | ||||||
|  | 			return err | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		timeOut := 1024 * time.Millisecond | ||||||
|  |  | ||||||
|  | 		if dl, ok := ctx.Deadline(); ok { | ||||||
|  | 			timeOutMax := dl.Sub(time.Now()) | ||||||
|  | 			if timeOutMax <= 0 { | ||||||
|  | 				timeOut = 0 | ||||||
|  | 			} else if 0 < timeOutMax && timeOutMax < timeOut { | ||||||
|  | 				timeOut = timeOutMax | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if v, ok := ReadChannelWithTimeout(waitchan, timeOut); ok { | ||||||
|  | 			if v == waitFor { | ||||||
|  | 				return nil | ||||||
|  | 			} | ||||||
|  | 		} else { | ||||||
|  | 			if err := ctx.Err(); err != nil { | ||||||
|  | 				return err | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			if a.Get() == waitFor { | ||||||
|  | 				return nil | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | } | ||||||
| @@ -14,6 +14,20 @@ func AssertEqual[T comparable](t *testing.T, actual T, expected T) { | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func AssertArrayEqual[T comparable](t *testing.T, actual []T, expected []T) { | ||||||
|  | 	t.Helper() | ||||||
|  | 	if len(actual) != len(expected) { | ||||||
|  | 		t.Errorf("values differ: Actual: '%v', Expected: '%v' (len %d <> %d)", actual, expected, len(actual), len(expected)) | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	for i := 0; i < len(actual); i++ { | ||||||
|  | 		if actual[i] != expected[i] { | ||||||
|  | 			t.Errorf("values differ: Actual: '%v', Expected: '%v' (at index %d)", actual, expected, i) | ||||||
|  | 			return | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
| func AssertNotEqual[T comparable](t *testing.T, actual T, expected T) { | func AssertNotEqual[T comparable](t *testing.T, actual T, expected T) { | ||||||
| 	t.Helper() | 	t.Helper() | ||||||
| 	if actual == expected { | 	if actual == expected { | ||||||
|   | |||||||
| @@ -44,6 +44,8 @@ type Coll[TData any] struct { | |||||||
| 	implDataTypeMap     map[reflect.Type]map[string]fullTypeRef                  // dynamic list of fields of TData implementations (only if TData is an interface) | 	implDataTypeMap     map[reflect.Type]map[string]fullTypeRef                  // dynamic list of fields of TData implementations (only if TData is an interface) | ||||||
| 	customDecoder       *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface) | 	customDecoder       *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface) | ||||||
| 	isInterfaceDataType bool                                                     // true if TData is an interface (not a struct) | 	isInterfaceDataType bool                                                     // true if TData is an interface (not a struct) | ||||||
|  | 	unmarshalHooks      []func(d TData) TData                                    // called for every object after unmarshalling | ||||||
|  | 	extraModPipeline    []func(ctx context.Context) mongo.Pipeline               // appended to pipelines after filter/limit/skip/sort, used for $lookup, $set, $unset, $project, etc | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Collection() *mongo.Collection { | func (c *Coll[TData]) Collection() *mongo.Collection { | ||||||
| @@ -54,14 +56,6 @@ func (c *Coll[TData]) Name() string { | |||||||
| 	return c.coll.Name() | 	return c.coll.Name() | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) WithDecodeFunc(cdf func(ctx context.Context, dec Decodable) (TData, error), example TData) *Coll[TData] { |  | ||||||
|  |  | ||||||
| 	c.EnsureInitializedReflection(example) |  | ||||||
|  |  | ||||||
| 	c.customDecoder = langext.Ptr(cdf) |  | ||||||
| 	return c |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Indexes() mongo.IndexView { | func (c *Coll[TData]) Indexes() mongo.IndexView { | ||||||
| 	return c.coll.Indexes() | 	return c.coll.Indexes() | ||||||
| } | } | ||||||
| @@ -74,6 +68,32 @@ func (c *Coll[TData]) Drop(ctx context.Context) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) WithDecodeFunc(cdf func(ctx context.Context, dec Decodable) (TData, error), example TData) *Coll[TData] { | ||||||
|  |  | ||||||
|  | 	c.EnsureInitializedReflection(example) | ||||||
|  |  | ||||||
|  | 	c.customDecoder = langext.Ptr(cdf) | ||||||
|  | 	return c | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) WithUnmarshalHook(fn func(d TData) TData) *Coll[TData] { | ||||||
|  | 	c.unmarshalHooks = append(c.unmarshalHooks, fn) | ||||||
|  |  | ||||||
|  | 	return c | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) WithModifyingPipeline(p mongo.Pipeline) *Coll[TData] { | ||||||
|  | 	c.extraModPipeline = append(c.extraModPipeline, func(ctx context.Context) mongo.Pipeline { return p }) | ||||||
|  |  | ||||||
|  | 	return c | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) WithModifyingPipelineFunc(fn func(ctx context.Context) mongo.Pipeline) *Coll[TData] { | ||||||
|  | 	c.extraModPipeline = append(c.extraModPipeline, fn) | ||||||
|  |  | ||||||
|  | 	return c | ||||||
|  | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) { | func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) { | ||||||
|  |  | ||||||
| 	valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary) | 	valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary) | ||||||
|   | |||||||
| @@ -2,37 +2,39 @@ package wmo | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"context" | 	"context" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) decodeSingle(ctx context.Context, dec Decodable) (TData, error) { | func (c *Coll[TData]) decodeSingle(ctx context.Context, dec Decodable) (TData, error) { | ||||||
| 	if c.customDecoder != nil { |  | ||||||
|  |  | ||||||
| 		res, err := (*c.customDecoder)(ctx, dec) |  | ||||||
| 		if err != nil { |  | ||||||
| 			return *new(TData), exerr.Wrap(err, "failed to decode single entity with custom-decoder").Type("decoder", *c.customDecoder).Build() |  | ||||||
| 		} |  | ||||||
| 		return res, nil |  | ||||||
|  |  | ||||||
| 	} else { |  | ||||||
|  |  | ||||||
| 	var res TData | 	var res TData | ||||||
|  | 	var err error | ||||||
|  |  | ||||||
| 		err := dec.Decode(&res) | 	if c.customDecoder != nil { | ||||||
|  | 		res, err = (*c.customDecoder)(ctx, dec) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return *new(TData), exerr.Wrap(err, "failed to decode single entity").Type("target-type", res).Build() | 			return *new(TData), exerr.Wrap(err, "failed to decode single entity with custom-decoder").Type("decoder", *c.customDecoder).NoLog().Build() | ||||||
|  | 		} | ||||||
|  | 	} else { | ||||||
|  | 		err = dec.Decode(&res) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return *new(TData), exerr.Wrap(err, "failed to decode single entity").Type("target-type", res).NoLog().Build() | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for _, hook := range c.unmarshalHooks { | ||||||
|  | 		res = hook(res) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return res, nil | 	return res, nil | ||||||
|  |  | ||||||
| 	} |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData, error) { | func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData, error) { | ||||||
| 	if c.customDecoder != nil { |  | ||||||
|  |  | ||||||
| 	res := make([]TData, 0, cursor.RemainingBatchLength()) | 	res := make([]TData, 0, cursor.RemainingBatchLength()) | ||||||
|  |  | ||||||
|  | 	if c.customDecoder != nil { | ||||||
| 		for cursor.Next(ctx) { | 		for cursor.Next(ctx) { | ||||||
| 			entry, err := (*c.customDecoder)(ctx, cursor) | 			entry, err := (*c.customDecoder)(ctx, cursor) | ||||||
| 			if err != nil { | 			if err != nil { | ||||||
| @@ -40,20 +42,48 @@ func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData | |||||||
| 			} | 			} | ||||||
| 			res = append(res, entry) | 			res = append(res, entry) | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		return res, nil |  | ||||||
|  |  | ||||||
| 	} else { | 	} else { | ||||||
|  |  | ||||||
| 		res := make([]TData, 0, cursor.RemainingBatchLength()) |  | ||||||
|  |  | ||||||
| 		err := cursor.All(ctx, &res) | 		err := cursor.All(ctx, &res) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, exerr.Wrap(err, "failed to batch-decode entity").Type("target-type", res).Build() | 			return nil, exerr.Wrap(err, "failed to batch-decode entity").Type("target-type", res).Build() | ||||||
| 		} | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for i := 0; i < len(res); i++ { | ||||||
|  | 		for _, hook := range c.unmarshalHooks { | ||||||
|  | 			res[i] = hook(res[i]) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	return res, nil | 	return res, nil | ||||||
|  |  | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) decodeSingleOrRequery(ctx context.Context, dec Decodable) (TData, error) { | ||||||
|  | 	if c.extraModPipeline == nil { | ||||||
|  |  | ||||||
|  | 		// simple case, we can just decode the result and return it | ||||||
|  | 		return c.decodeSingle(ctx, dec) | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		// annyoing case, we have a extraModPipeline and need to re-query the document such that the extraModPipeline is applied... | ||||||
|  |  | ||||||
|  | 		type genDoc struct { | ||||||
|  | 			ID any `bson:"_id"` | ||||||
|  | 		} | ||||||
|  | 		var res genDoc | ||||||
|  | 		err := dec.Decode(&res) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return *new(TData), exerr.Wrap(err, "failed to ID-decode entity").NoLog().Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		v, err := c.findOneInternal(ctx, bson.M{"_id": res.ID}, false) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return *new(TData), exerr.Wrap(err, "failed to re-query entity").Any("_id", res.ID).NoLog().Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		return *v, nil | ||||||
|  |  | ||||||
|  | 	} | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,87 +0,0 @@ | |||||||
| package wmo |  | ||||||
|  |  | ||||||
| import ( |  | ||||||
| 	"go.mongodb.org/mongo-driver/bson" |  | ||||||
| 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| func CreatePagination[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) { |  | ||||||
|  |  | ||||||
| 	cond := bson.A{} |  | ||||||
| 	sort := bson.D{} |  | ||||||
|  |  | ||||||
| 	valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary) |  | ||||||
| 	if err != nil { |  | ||||||
| 		return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build() |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	if sortPrimary == ct.SortASC { |  | ||||||
| 		// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary |  | ||||||
| 		cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}}) |  | ||||||
| 		sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) |  | ||||||
| 	} else if sortPrimary == ct.SortDESC { |  | ||||||
| 		// We sort DESC on <field> - so we want all entries older ($lt) than the $primary |  | ||||||
| 		cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}}) |  | ||||||
| 		sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary { |  | ||||||
|  |  | ||||||
| 		valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary) |  | ||||||
| 		if err != nil { |  | ||||||
| 			return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build() |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		if *sortSecondary == ct.SortASC { |  | ||||||
|  |  | ||||||
| 			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer) |  | ||||||
| 			cond = append(cond, bson.M{"$and": bson.A{ |  | ||||||
| 				bson.M{fieldPrimary: valuePrimary}, |  | ||||||
| 				bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}}, |  | ||||||
| 			}}) |  | ||||||
|  |  | ||||||
| 			sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) |  | ||||||
|  |  | ||||||
| 		} else if *sortSecondary == ct.SortDESC { |  | ||||||
|  |  | ||||||
| 			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older) |  | ||||||
| 			cond = append(cond, bson.M{"$and": bson.A{ |  | ||||||
| 				bson.M{fieldPrimary: valuePrimary}, |  | ||||||
| 				bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}}, |  | ||||||
| 			}}) |  | ||||||
|  |  | ||||||
| 			sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) |  | ||||||
|  |  | ||||||
| 		} |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	pipeline := make([]bson.D, 0, 3) |  | ||||||
|  |  | ||||||
| 	if token.Mode == ct.CTMStart { |  | ||||||
|  |  | ||||||
| 		// no gt/lt condition |  | ||||||
|  |  | ||||||
| 	} else if token.Mode == ct.CTMNormal { |  | ||||||
|  |  | ||||||
| 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}}) |  | ||||||
|  |  | ||||||
| 	} else if token.Mode == ct.CTMEnd { |  | ||||||
|  |  | ||||||
| 		// false |  | ||||||
| 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}}) |  | ||||||
|  |  | ||||||
| 	} else { |  | ||||||
|  |  | ||||||
| 		return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build() |  | ||||||
|  |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}}) |  | ||||||
|  |  | ||||||
| 	if pageSize != nil { |  | ||||||
| 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}}) |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return pipeline, nil |  | ||||||
| } |  | ||||||
| @@ -5,9 +5,15 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| 	"go.mongodb.org/mongo-driver/mongo/options" | 	"go.mongodb.org/mongo-driver/mongo/options" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) { | func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) { | ||||||
|  |  | ||||||
|  | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | ||||||
| @@ -22,6 +28,11 @@ func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, op | |||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) { | func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) { | ||||||
|  |  | ||||||
|  | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | ||||||
| @@ -39,6 +50,11 @@ func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeli | |||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) { | func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) { | ||||||
|  |  | ||||||
|  | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return *new(TData), exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | 		return *new(TData), exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | ||||||
|   | |||||||
							
								
								
									
										162
									
								
								wmo/queryFind.go
									
									
									
									
									
								
							
							
						
						
									
										162
									
								
								wmo/queryFind.go
									
									
									
									
									
								
							| @@ -2,69 +2,58 @@ package wmo | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"context" | 	"context" | ||||||
| 	"errors" |  | ||||||
| 	"go.mongodb.org/mongo-driver/bson" | 	"go.mongodb.org/mongo-driver/bson" | ||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| 	"go.mongodb.org/mongo-driver/mongo/options" | 	"go.mongodb.org/mongo-driver/mongo/options" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) { |  | ||||||
| 	mongoRes := c.coll.FindOne(ctx, filter) |  | ||||||
| 	if err := mongoRes.Err(); err != nil { |  | ||||||
| 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed"). |  | ||||||
| 			Str("collection", c.Name()). |  | ||||||
| 			Any("filter", filter). |  | ||||||
| 			Build() |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) { |  | ||||||
| 	mongoRes := c.coll.FindOne(ctx, filter) |  | ||||||
|  |  | ||||||
| 	res, err := c.decodeSingle(ctx, mongoRes) |  | ||||||
| 	if errors.Is(err, mongo.ErrNoDocuments) { |  | ||||||
| 		return nil, nil |  | ||||||
| 	} |  | ||||||
| 	if err != nil { |  | ||||||
| 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Any("filter", filter).Str("collection", c.Name()).Build() |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return &res, nil |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) { |  | ||||||
| 	mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id}) |  | ||||||
| 	if err := mongoRes.Err(); err != nil { |  | ||||||
| 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one-by-id] failed"). |  | ||||||
| 			Str("collection", c.Name()). |  | ||||||
| 			Id("id", id). |  | ||||||
| 			Build() |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) { |  | ||||||
| 	mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id}) |  | ||||||
|  |  | ||||||
| 	res, err := c.decodeSingle(ctx, mongoRes) |  | ||||||
| 	if errors.Is(err, mongo.ErrNoDocuments) { |  | ||||||
| 		return nil, nil |  | ||||||
| 	} |  | ||||||
| 	if err != nil { |  | ||||||
| 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt-by-id] failed").Id("id", id).Str("collection", c.Name()).Build() |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return &res, nil |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) { | func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) { | ||||||
| 	cursor, err := c.coll.Find(ctx, filter, opts...) |  | ||||||
|  | 	pipeline := mongo.Pipeline{} | ||||||
|  | 	pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}}) | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		if opt != nil && opt.Sort != nil { | ||||||
|  | 			pipeline = append(pipeline, bson.D{{Key: "$sort", Value: opt.Sort}}) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		if opt != nil && opt.Skip != nil { | ||||||
|  | 			pipeline = append(pipeline, bson.D{{Key: "$skip", Value: *opt.Skip}}) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		if opt != nil && opt.Limit != nil { | ||||||
|  | 			pipeline = append(pipeline, bson.D{{Key: "$limit", Value: *opt.Limit}}) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		if opt != nil && opt.Projection != nil { | ||||||
|  | 			pipeline = append(pipeline, bson.D{{Key: "$project", Value: opt.Projection}}) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	convOpts := make([]*options.AggregateOptions, 0, len(opts)) | ||||||
|  | 	for _, v := range opts { | ||||||
|  | 		vConv, err := convertFindOpt(v) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Any("filter", filter).Any("opts", opts).Str("collection", c.Name()).Build() | 			return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||||
|  | 		} | ||||||
|  | 		convOpts = append(convOpts, vConv) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	cursor, err := c.coll.Aggregate(ctx, pipeline, convOpts...) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	res, err := c.decodeAll(ctx, cursor) | 	res, err := c.decodeAll(ctx, cursor) | ||||||
| @@ -74,3 +63,66 @@ func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options. | |||||||
|  |  | ||||||
| 	return res, nil | 	return res, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // converts FindOptions to AggregateOptions | ||||||
|  | func convertFindOpt(v *options.FindOptions) (*options.AggregateOptions, error) { | ||||||
|  | 	if v == nil { | ||||||
|  | 		return nil, nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	r := options.Aggregate() | ||||||
|  |  | ||||||
|  | 	if v.AllowDiskUse != nil { | ||||||
|  | 		r.SetAllowDiskUse(*v.AllowDiskUse) | ||||||
|  | 	} | ||||||
|  | 	if v.AllowPartialResults != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'AllowPartialResults' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.BatchSize != nil { | ||||||
|  | 		r.SetBatchSize(*v.BatchSize) | ||||||
|  | 	} | ||||||
|  | 	if v.Collation != nil { | ||||||
|  | 		r.SetCollation(v.Collation) | ||||||
|  | 	} | ||||||
|  | 	if v.Comment != nil { | ||||||
|  | 		r.SetComment(*v.Comment) | ||||||
|  | 	} | ||||||
|  | 	if v.CursorType != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'CursorType' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.Hint != nil { | ||||||
|  | 		r.SetHint(v.Hint) | ||||||
|  | 	} | ||||||
|  | 	if v.Max != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'Max' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.MaxAwaitTime != nil { | ||||||
|  | 		r.SetMaxAwaitTime(*v.MaxAwaitTime) | ||||||
|  | 	} | ||||||
|  | 	if v.MaxTime != nil { | ||||||
|  | 		r.SetMaxTime(*v.MaxTime) | ||||||
|  | 	} | ||||||
|  | 	if v.Min != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'Min' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.NoCursorTimeout != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'NoCursorTimeout' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.OplogReplay != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'OplogReplay' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.ReturnKey != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'ReturnKey' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.ShowRecordID != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'ShowRecordID' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.Snapshot != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'Snapshot' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.Let != nil { | ||||||
|  | 		r.SetLet(v.Let) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return r, nil | ||||||
|  | } | ||||||
|   | |||||||
							
								
								
									
										95
									
								
								wmo/queryFindOne.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										95
									
								
								wmo/queryFindOne.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,95 @@ | |||||||
|  | package wmo | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"errors" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) { | ||||||
|  | 	r, err := c.findOneInternal(ctx, filter, false) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return *r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) { | ||||||
|  | 	r, err := c.findOneInternal(ctx, filter, true) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) { | ||||||
|  | 	r, err := c.findOneInternal(ctx, bson.M{"_id": id}, false) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one-by-id] failed").Id("id", id).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return *r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) { | ||||||
|  | 	r, err := c.findOneInternal(ctx, bson.M{"_id": id}, true) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt-by-id] failed").Id("id", id).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowNull bool) (*TData, error) { | ||||||
|  |  | ||||||
|  | 	if len(c.extraModPipeline) == 0 { | ||||||
|  |  | ||||||
|  | 		// simple case, use mongo FindOne | ||||||
|  |  | ||||||
|  | 		mongoRes := c.coll.FindOne(ctx, filter) | ||||||
|  |  | ||||||
|  | 		res, err := c.decodeSingle(ctx, mongoRes) | ||||||
|  | 		if allowNull && errors.Is(err, mongo.ErrNoDocuments) { | ||||||
|  | 			return nil, nil | ||||||
|  | 		} | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "mongo-query[find-one] failed").Any("filter", filter).Str("collection", c.Name()).NoLog().Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		return &res, nil | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		// complex case, we one ore more additional pipeline stages, convert to aggregation | ||||||
|  |  | ||||||
|  | 		pipeline := mongo.Pipeline{} | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}}) | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: 1}}) | ||||||
|  |  | ||||||
|  | 		for _, ppl := range c.extraModPipeline { | ||||||
|  | 			pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).NoLog().Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if cursor.Next(ctx) { | ||||||
|  | 			v, err := c.decodeSingle(ctx, cursor) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed to decode results").Any("pipeline", pipeline).Str("collection", c.Name()).NoLog().Build() | ||||||
|  | 			} | ||||||
|  | 			return &v, nil | ||||||
|  | 		} else if allowNull { | ||||||
|  | 			return nil, nil | ||||||
|  | 		} else { | ||||||
|  | 			return nil, exerr.Wrap(mongo.ErrNoDocuments, "mongo-aggregation [find-one] returned no documents").Any("pipeline", pipeline).Str("collection", c.Name()).NoLog().Build() | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | } | ||||||
| @@ -14,9 +14,25 @@ func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, erro | |||||||
| 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	mongoRes := c.coll.FindOne(ctx, bson.M{"_id": insRes.InsertedID}) | 	r, err := c.findOneInternal(ctx, bson.M{"_id": insRes.InsertedID}, false) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  | 	return *r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) | // InsertOneUnchecked behaves the same as InsertOne, but allows arbitrary data to be inserted (valueIn is any instead of TData) | ||||||
|  | func (c *Coll[TData]) InsertOneUnchecked(ctx context.Context, valueIn any) (TData, error) { | ||||||
|  | 	insRes, err := c.coll.InsertOne(ctx, valueIn) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	r, err := c.findOneInternal(ctx, bson.M{"_id": insRes.InsertedID}, false) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  | 	return *r, nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) { | func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) { | ||||||
| @@ -27,3 +43,13 @@ func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.I | |||||||
|  |  | ||||||
| 	return insRes, nil | 	return insRes, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // InsertManyUnchecked behaves the same as InsertOne, but allows arbitrary data to be inserted (valueIn is []any instead of []TData) | ||||||
|  | func (c *Coll[TData]) InsertManyUnchecked(ctx context.Context, valueIn []any) (*mongo.InsertManyResult, error) { | ||||||
|  | 	insRes, err := c.coll.InsertMany(ctx, langext.ArrayToInterface(valueIn)) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "mongo-query[insert-many] failed").Int("len(valueIn)", len(valueIn)).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return insRes, nil | ||||||
|  | } | ||||||
|   | |||||||
| @@ -6,6 +6,7 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) { | func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) { | ||||||
| @@ -34,7 +35,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
| 		sortDirSecondary = nil | 		sortDirSecondary = nil | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	paginationPipeline, err := CreatePagination(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | 	paginationPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, ct.CursorToken{}, exerr. | 		return nil, ct.CursorToken{}, exerr. | ||||||
| 			Wrap(err, "failed to create pagination"). | 			Wrap(err, "failed to create pagination"). | ||||||
| @@ -51,6 +52,10 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
|  |  | ||||||
| 	pipeline = append(pipeline, paginationPipeline...) | 	pipeline = append(pipeline, paginationPipeline...) | ||||||
|  |  | ||||||
|  | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline) | 	cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, ct.CursorToken{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | 		return nil, ct.CursorToken{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||||
| @@ -91,11 +96,11 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
| 	return entities, nextToken, nil | 	return entities, nextToken, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, error) { | ||||||
| 	type countRes struct { | 	type countRes struct { | ||||||
| 		Count int64 `bson:"c"` | 		Count int64 `bson:"c"` | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Count(ctx context.Context, filter ct.Filter) (int64, error) { |  | ||||||
| 	pipeline := filter.FilterQuery() | 	pipeline := filter.FilterQuery() | ||||||
|  |  | ||||||
| 	pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}}) | 	pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}}) | ||||||
| @@ -130,3 +135,83 @@ func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageS | |||||||
| 	} | 	} | ||||||
| 	return data, token, count, nil | 	return data, token, count, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) { | ||||||
|  |  | ||||||
|  | 	cond := bson.A{} | ||||||
|  | 	sort := bson.D{} | ||||||
|  |  | ||||||
|  | 	valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if sortPrimary == ct.SortASC { | ||||||
|  | 		// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary | ||||||
|  | 		cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}}) | ||||||
|  | 		sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) | ||||||
|  | 	} else if sortPrimary == ct.SortDESC { | ||||||
|  | 		// We sort DESC on <field> - so we want all entries older ($lt) than the $primary | ||||||
|  | 		cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}}) | ||||||
|  | 		sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary { | ||||||
|  |  | ||||||
|  | 		valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if *sortSecondary == ct.SortASC { | ||||||
|  |  | ||||||
|  | 			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer) | ||||||
|  | 			cond = append(cond, bson.M{"$and": bson.A{ | ||||||
|  | 				bson.M{fieldPrimary: valuePrimary}, | ||||||
|  | 				bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}}, | ||||||
|  | 			}}) | ||||||
|  |  | ||||||
|  | 			sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) | ||||||
|  |  | ||||||
|  | 		} else if *sortSecondary == ct.SortDESC { | ||||||
|  |  | ||||||
|  | 			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older) | ||||||
|  | 			cond = append(cond, bson.M{"$and": bson.A{ | ||||||
|  | 				bson.M{fieldPrimary: valuePrimary}, | ||||||
|  | 				bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}}, | ||||||
|  | 			}}) | ||||||
|  |  | ||||||
|  | 			sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) | ||||||
|  |  | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipeline := make([]bson.D, 0, 3) | ||||||
|  |  | ||||||
|  | 	if token.Mode == ct.CTMStart { | ||||||
|  |  | ||||||
|  | 		// no gt/lt condition | ||||||
|  |  | ||||||
|  | 	} else if token.Mode == ct.CTMNormal { | ||||||
|  |  | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}}) | ||||||
|  |  | ||||||
|  | 	} else if token.Mode == ct.CTMEnd { | ||||||
|  |  | ||||||
|  | 		// false | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}}) | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build() | ||||||
|  |  | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}}) | ||||||
|  |  | ||||||
|  | 	if pageSize != nil { | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}}) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return pipeline, nil | ||||||
|  | } | ||||||
|   | |||||||
							
								
								
									
										87
									
								
								wmo/queryPaginate.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										87
									
								
								wmo/queryPaginate.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,87 @@ | |||||||
|  | package wmo | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.MongoFilter, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||||
|  | 	type totalCountResult struct { | ||||||
|  | 		Count int `bson:"count"` | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if page < 0 { | ||||||
|  | 		page = 1 | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelineSort := mongo.Pipeline{} | ||||||
|  | 	pipelineFilter := mongo.Pipeline{} | ||||||
|  | 	sort := bson.D{} | ||||||
|  |  | ||||||
|  | 	if filter != nil { | ||||||
|  | 		pipelineFilter = filter.FilterQuery() | ||||||
|  | 		sort = filter.Sort() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if len(sort) != 0 { | ||||||
|  | 		pipelineSort = append(pipelineSort, bson.D{{Key: "$sort", Value: sort}}) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelinePaginate := mongo.Pipeline{} | ||||||
|  | 	if limit != nil { | ||||||
|  | 		pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$skip", Value: *limit * (page - 1)}}) | ||||||
|  | 		pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$limit", Value: *limit}}) | ||||||
|  | 	} else { | ||||||
|  | 		page = 1 | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelineCount := mongo.Pipeline{} | ||||||
|  | 	pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}}) | ||||||
|  |  | ||||||
|  | 	extrModPipelineResolved := mongo.Pipeline{} | ||||||
|  | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelineList := langext.ArrConcat(pipelineFilter, pipelineSort, pipelinePaginate, extrModPipelineResolved, pipelineSort) | ||||||
|  | 	pipelineTotalCount := langext.ArrConcat(pipelineFilter, pipelineCount) | ||||||
|  |  | ||||||
|  | 	cursorList, err := c.coll.Aggregate(ctx, pipelineList) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	entities, err := c.decodeAll(ctx, cursorList) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, pag.Pagination{}, exerr.Wrap(err, "failed to all-decode entities").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	cursorTotalCount, err := c.coll.Aggregate(ctx, pipelineTotalCount) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	var tcRes totalCountResult | ||||||
|  | 	if cursorTotalCount.Next(ctx) { | ||||||
|  | 		err = cursorTotalCount.Decode(&tcRes) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode mongo-aggregation $count result").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build() | ||||||
|  | 		} | ||||||
|  | 	} else { | ||||||
|  | 		tcRes.Count = 0 // no entries in DB | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	paginationObj := pag.Pagination{ | ||||||
|  | 		Page:             page, | ||||||
|  | 		Limit:            langext.Coalesce(limit, tcRes.Count), | ||||||
|  | 		TotalPages:       pag.CalcPaginationTotalPages(tcRes.Count, langext.Coalesce(limit, tcRes.Count)), | ||||||
|  | 		TotalItems:       tcRes.Count, | ||||||
|  | 		CurrentPageCount: len(entities), | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return entities, paginationObj, nil | ||||||
|  | } | ||||||
| @@ -18,7 +18,7 @@ func (c *Coll[TData]) FindOneAndUpdate(ctx context.Context, filterQuery bson.M, | |||||||
| 			Build() | 			Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) | 	return c.decodeSingleOrRequery(ctx, mongoRes) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQuery bson.M) error { | func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQuery bson.M) error { | ||||||
| @@ -81,5 +81,5 @@ func (c *Coll[TData]) FindOneAndReplace(ctx context.Context, filterQuery bson.M, | |||||||
| 			Build() | 			Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) | 	return c.decodeSingleOrRequery(ctx, mongoRes) | ||||||
| } | } | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user