Compare commits
	
		
			42 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 8f15d42173 | |||
| 07fa21dcca | |||
| e657de7f78 | |||
| c534e998e8 | |||
| 88642770c5 | |||
| 8528b5cb66 | |||
| 5ba84bd8ee | |||
| 1260b2dc77 | |||
| 7d18b913c6 | |||
| d1f9069f2f | |||
| fa6d73301e | |||
| bfe62799d3 | |||
| ede912eb7b | |||
| ff8f128fe8 | |||
| 1971f1396f | |||
| bf6c184d12 | |||
| 770f5c5c64 | |||
| 623c021689 | |||
| afcc89bf9e | |||
| 1672e8f8fd | |||
| 398ed56d32 | |||
| f3ecba3883 | |||
| 45031b05cf | |||
| 7413ea045d | |||
| 62c9a4e734 | |||
| 3a8baaa6d9 | |||
| 498785e213 | |||
| 678f95642c | |||
| dacc97e2ce | |||
| f8c0c0afa0 | |||
| 2fbd5cf965 | |||
| 75f71fe3db | |||
| ab1a1ab6f6 | |||
| 19ee5019ef | |||
| 42b68507f2 | |||
| 9d0047a11e | |||
| 06d81f1682 | |||
| 7b8ab03779 | |||
| 07cbcf5a0a | |||
| da41ec3e84 | |||
| 592fae25af | |||
| 7968460fa2 | 
| @@ -1,9 +0,0 @@ | |||||||
| FROM golang:latest |  | ||||||
|  |  | ||||||
| RUN apt install -y make curl python3 && go install gotest.tools/gotestsum@latest |  | ||||||
|  |  | ||||||
| COPY . /source |  | ||||||
|  |  | ||||||
| WORKDIR /source |  | ||||||
|  |  | ||||||
| CMD ["make", "test"] |  | ||||||
| @@ -6,7 +6,12 @@ | |||||||
| name: Build Docker and Deploy | name: Build Docker and Deploy | ||||||
| run-name: Build & Deploy ${{ gitea.ref }} on ${{ gitea.actor }} | run-name: Build & Deploy ${{ gitea.ref }} on ${{ gitea.actor }} | ||||||
|  |  | ||||||
| on: [push] | on: | ||||||
|  |   push: | ||||||
|  |     branches: | ||||||
|  |       - '*' | ||||||
|  |       - '**' | ||||||
|  |  | ||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   run_tests: |   run_tests: | ||||||
| @@ -17,14 +22,34 @@ jobs: | |||||||
|       - name: Check out code |       - name: Check out code | ||||||
|         uses: actions/checkout@v3 |         uses: actions/checkout@v3 | ||||||
|  |  | ||||||
|       - name: Build test docker |       - name: Setup go | ||||||
|         id: build_docker |         uses: actions/setup-go@v4 | ||||||
|         run: echo "DOCKER_IMG_ID=$(docker build -q . -f .gitea/workflows/Dockerfile_tests || echo __err_build__)" >> $GITHUB_OUTPUT |         with: | ||||||
|  |           go-version-file: '${{ gitea.workspace }}/go.mod' | ||||||
|  |  | ||||||
|  |       - name: Setup packages | ||||||
|  |         uses: awalsh128/cache-apt-pkgs-action@latest | ||||||
|  |         with: | ||||||
|  |           packages: curl python3 | ||||||
|  |           version: 1.0 | ||||||
|  |  | ||||||
|  |       - name: go version | ||||||
|  |         run: go version | ||||||
|  |  | ||||||
|       - name: Run tests |       - name: Run tests | ||||||
|         run: docker run --rm "${{ steps.build_docker.outputs.DOCKER_IMG_ID }}" |         run: cd "${{ gitea.workspace }}" && make test | ||||||
|  |  | ||||||
|       - name: Cleanup |       - name: Send failure mail | ||||||
|         if: always() |         if: failure() | ||||||
|         run: docker image rm "${{ steps.build_docker.outputs.DOCKER_IMG_ID }}" |         uses: dawidd6/action-send-mail@v3 | ||||||
|  |         with: | ||||||
|  |           server_address: smtp.fastmail.com | ||||||
|  |           server_port: 465 | ||||||
|  |           secure: true | ||||||
|  |           username: ${{secrets.MAIL_USERNAME}} | ||||||
|  |           password: ${{secrets.MAIL_PASSWORD}} | ||||||
|  |           subject: Pipeline on '${{ gitea.repository }}' failed | ||||||
|  |           to: ${{ steps.commiter_info.outputs.MAIL }} | ||||||
|  |           from: Gitea Actions <gitea_actions@blackforestbytes.de> | ||||||
|  |           body: "Go to https://gogs.blackforestbytes.com/${{ gitea.repository }}/actions" | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										6
									
								
								.idea/golinter.xml
									
									
									
										generated
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								.idea/golinter.xml
									
									
									
										generated
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | |||||||
|  | <?xml version="1.0" encoding="UTF-8"?> | ||||||
|  | <project version="4"> | ||||||
|  |   <component name="GoLinterSettings"> | ||||||
|  |     <option name="checkGoLinterExe" value="false" /> | ||||||
|  |   </component> | ||||||
|  | </project> | ||||||
| @@ -11,17 +11,19 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"` | |||||||
| ### Packages: | ### Packages: | ||||||
|  |  | ||||||
| | Name        | Maintainer | Description                                                                                                   | | | Name        | Maintainer | Description                                                                                                   | | ||||||
| |--------------|------------|---------------------------------------------------------------------------------------------------------------| | |-------------|------------|---------------------------------------------------------------------------------------------------------------| | ||||||
| | langext     | Mike       | General uttility/helper functions, (everything thats missing from go standard library)                        | | | langext     | Mike       | General uttility/helper functions, (everything thats missing from go standard library)                        | | ||||||
| | mathext     | Mike       | Utility/Helper functions for math                                                                             | | | mathext     | Mike       | Utility/Helper functions for math                                                                             | | ||||||
| | cryptext    | Mike       | Utility/Helper functions for encryption                                                                       | | | cryptext    | Mike       | Utility/Helper functions for encryption                                                                       | | ||||||
| | syncext     | Mike       | Utility/Helper funtions for multi-threading / mutex / channels                                                | | | syncext     | Mike       | Utility/Helper funtions for multi-threading / mutex / channels                                                | | ||||||
| | dataext     | Mike       | Various useful data structures                                                                                | | | dataext     | Mike       | Various useful data structures                                                                                | | ||||||
| | zipext      | Mike       | Utility for zip/gzip/tar etc                                                                                  | | | zipext      | Mike       | Utility for zip/gzip/tar etc                                                                                  | | ||||||
| | reflectext   | Mike       | Utility for golagn reflection                                                                                 | | | reflectext  | Mike       | Utility for golang reflection                                                                                 | | ||||||
|  | | fsext       | Mike       | Utility for filesytem access                                                                                  | | ||||||
| |             |            |                                                                                                               | | |             |            |                                                                                                               | | ||||||
| | mongoext    | Mike       | Utility/Helper functions for mongodb                                                                          | | | mongoext    | Mike       | Utility/Helper functions for mongodb                                                                          | | ||||||
| | cursortoken | Mike       | MongoDB cursortoken implementation                                                                            | | | cursortoken | Mike       | MongoDB cursortoken implementation                                                                            | | ||||||
|  | | pagination  | Mike       | Pagination implementation                                                                                     | | ||||||
| |             |            |                                                                                                               | | |             |            |                                                                                                               | | ||||||
| | totpext     | Mike       | Implementation of TOTP (2-Factor-Auth)                                                                        | | | totpext     | Mike       | Implementation of TOTP (2-Factor-Auth)                                                                        | | ||||||
| | termext     | Mike       | Utilities for terminals (mostly color output)                                                                 | | | termext     | Mike       | Utilities for terminals (mostly color output)                                                                 | | ||||||
|   | |||||||
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										182
									
								
								bfcodegen/csid-generate.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										182
									
								
								bfcodegen/csid-generate.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,182 @@ | |||||||
|  | package bfcodegen | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	_ "embed" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go/format" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
|  | 	"io" | ||||||
|  | 	"os" | ||||||
|  | 	"path" | ||||||
|  | 	"path/filepath" | ||||||
|  | 	"regexp" | ||||||
|  | 	"strings" | ||||||
|  | 	"text/template" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type CSIDDef struct { | ||||||
|  | 	File         string | ||||||
|  | 	FileRelative string | ||||||
|  | 	Name         string | ||||||
|  | 	Prefix       string | ||||||
|  | } | ||||||
|  |  | ||||||
|  | var rexCSIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||||
|  |  | ||||||
|  | var rexCSIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@csid:type)\s+\[(?P<prefix>[A-Z0-9]{3})].*$`)) | ||||||
|  |  | ||||||
|  | var rexCSIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumCharsetIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | ||||||
|  |  | ||||||
|  | //go:embed csid-generate.template | ||||||
|  | var templateCSIDGenerateText string | ||||||
|  |  | ||||||
|  | func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | ||||||
|  |  | ||||||
|  | 	files, err := os.ReadDir(sourceDir) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	oldChecksum := "N/A" | ||||||
|  | 	if _, err := os.Stat(destFile); !os.IsNotExist(err) { | ||||||
|  | 		content, err := os.ReadFile(destFile) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return err | ||||||
|  | 		} | ||||||
|  | 		if m, ok := rexCSIDChecksumConst.MatchFirst(string(content)); ok { | ||||||
|  | 			oldChecksum = m.GroupByName("cs").Value() | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) }) | ||||||
|  | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") }) | ||||||
|  | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") }) | ||||||
|  | 	langext.SortBy(files, func(v os.DirEntry) string { return v.Name() }) | ||||||
|  |  | ||||||
|  | 	newChecksumStr := goext.GoextVersion | ||||||
|  | 	for _, f := range files { | ||||||
|  | 		content, err := os.ReadFile(path.Join(sourceDir, f.Name())) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return err | ||||||
|  | 		} | ||||||
|  | 		newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	newChecksum := cryptext.BytesSha256([]byte(newChecksumStr)) | ||||||
|  |  | ||||||
|  | 	if newChecksum != oldChecksum { | ||||||
|  | 		fmt.Printf("[IDGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum) | ||||||
|  | 	} else { | ||||||
|  | 		fmt.Printf("[IDGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	allIDs := make([]CSIDDef, 0) | ||||||
|  |  | ||||||
|  | 	pkgname := "" | ||||||
|  |  | ||||||
|  | 	for _, f := range files { | ||||||
|  | 		fmt.Printf("========= %s =========\n\n", f.Name()) | ||||||
|  | 		fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name())) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return err | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		fmt.Printf("\n") | ||||||
|  |  | ||||||
|  | 		allIDs = append(allIDs, fileIDs...) | ||||||
|  |  | ||||||
|  | 		if pn != "" { | ||||||
|  | 			pkgname = pn | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if pkgname == "" { | ||||||
|  | 		return errors.New("no package name found in any file") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	fdata, err := format.Source([]byte(fmtCSIDOutput(newChecksum, allIDs, pkgname))) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = os.WriteFile(destFile, fdata, 0o755) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) { | ||||||
|  | 	file, err := os.Open(fn) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	defer func() { _ = file.Close() }() | ||||||
|  |  | ||||||
|  | 	bin, err := io.ReadAll(file) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	lines := strings.Split(string(bin), "\n") | ||||||
|  |  | ||||||
|  | 	ids := make([]CSIDDef, 0) | ||||||
|  |  | ||||||
|  | 	pkgname := "" | ||||||
|  |  | ||||||
|  | 	for i, line := range lines { | ||||||
|  | 		if i == 0 && strings.HasPrefix(line, "// Code generated by") { | ||||||
|  | 			break | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if match, ok := rexCSIDPackage.MatchFirst(line); i == 0 && ok { | ||||||
|  | 			pkgname = match.GroupByName("name").Value() | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if match, ok := rexCSIDDef.MatchFirst(line); ok { | ||||||
|  |  | ||||||
|  | 			rfp, err := filepath.Rel(basedir, fn) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return nil, "", err | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			def := CSIDDef{ | ||||||
|  | 				File:         fn, | ||||||
|  | 				FileRelative: rfp, | ||||||
|  | 				Name:         match.GroupByName("name").Value(), | ||||||
|  | 				Prefix:       match.GroupByName("prefix").Value(), | ||||||
|  | 			} | ||||||
|  | 			fmt.Printf("Found ID definition { '%s' }\n", def.Name) | ||||||
|  | 			ids = append(ids, def) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return ids, pkgname, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func fmtCSIDOutput(cs string, ids []CSIDDef, pkgname string) string { | ||||||
|  | 	templ := template.Must(template.New("csid-generate").Parse(templateCSIDGenerateText)) | ||||||
|  |  | ||||||
|  | 	buffer := bytes.Buffer{} | ||||||
|  |  | ||||||
|  | 	err := templ.Execute(&buffer, langext.H{ | ||||||
|  | 		"PkgName":      pkgname, | ||||||
|  | 		"Checksum":     cs, | ||||||
|  | 		"GoextVersion": goext.GoextVersion, | ||||||
|  | 		"IDs":          ids, | ||||||
|  | 	}) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return buffer.String() | ||||||
|  | } | ||||||
							
								
								
									
										190
									
								
								bfcodegen/csid-generate.template
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										190
									
								
								bfcodegen/csid-generate.template
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,190 @@ | |||||||
|  | // Code generated by csid-generate.go DO NOT EDIT. | ||||||
|  |  | ||||||
|  | package {{.PkgName}} | ||||||
|  |  | ||||||
|  | import "crypto/rand" | ||||||
|  | import "crypto/sha256" | ||||||
|  | import "fmt" | ||||||
|  | import "github.com/go-playground/validator/v10" | ||||||
|  | import "github.com/rs/zerolog/log" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
|  | import "math/big" | ||||||
|  | import "reflect" | ||||||
|  | import "regexp" | ||||||
|  | import "strings" | ||||||
|  |  | ||||||
|  | const ChecksumCharsetIDGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | ||||||
|  |  | ||||||
|  | const idlen = 24 | ||||||
|  |  | ||||||
|  | const checklen = 1 | ||||||
|  |  | ||||||
|  | const idCharset = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" | ||||||
|  | const idCharsetLen = len(idCharset) | ||||||
|  |  | ||||||
|  | var charSetReverseMap = generateCharsetMap() | ||||||
|  |  | ||||||
|  | const ({{range .IDs}} | ||||||
|  | 	prefix{{.Name}} = "{{.Prefix}}" {{end}} | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | var ({{range .IDs}} | ||||||
|  | 	regex{{.Name}} = generateRegex(prefix{{.Name}}) {{end}} | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func generateRegex(prefix string) rext.Regex { | ||||||
|  | 	return rext.W(regexp.MustCompile(fmt.Sprintf("^%s[%s]{%d}[%s]{%d}$", prefix, idCharset, idlen-len(prefix)-checklen, idCharset, checklen))) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func generateCharsetMap() []int { | ||||||
|  | 	result := make([]int, 128) | ||||||
|  | 	for i := 0; i < len(result); i++ { | ||||||
|  | 		result[i] = -1 | ||||||
|  | 	} | ||||||
|  | 	for idx, chr := range idCharset { | ||||||
|  | 		result[int(chr)] = idx | ||||||
|  | 	} | ||||||
|  | 	return result | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func generateID(prefix string) string { | ||||||
|  | 	k := "" | ||||||
|  | 	csMax := big.NewInt(int64(idCharsetLen)) | ||||||
|  | 	checksum := 0 | ||||||
|  | 	for i := 0; i < idlen-len(prefix)-checklen; i++ { | ||||||
|  | 		v, err := rand.Int(rand.Reader, csMax) | ||||||
|  | 		if err != nil { | ||||||
|  | 			panic(err) | ||||||
|  | 		} | ||||||
|  | 		v64 := v.Int64() | ||||||
|  | 		k += string(idCharset[v64]) | ||||||
|  | 		checksum = (checksum + int(v64)) % (idCharsetLen) | ||||||
|  | 	} | ||||||
|  | 	checkstr := string(idCharset[checksum%idCharsetLen]) | ||||||
|  | 	return prefix + k + checkstr | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func generateIDFromSeed(prefix string, seed string) string { | ||||||
|  | 	h := sha256.New() | ||||||
|  |  | ||||||
|  | 	iddata := "" | ||||||
|  | 	for len(iddata) < idlen-len(prefix)-checklen { | ||||||
|  | 		h.Write([]byte(seed)) | ||||||
|  | 		bs := h.Sum(nil) | ||||||
|  | 		iddata += langext.NewAnyBaseConverter(idCharset).Encode(bs) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	checksum := 0 | ||||||
|  | 	for i := 0; i < idlen-len(prefix)-checklen; i++ { | ||||||
|  | 		ichr := int(iddata[i]) | ||||||
|  | 		checksum = (checksum + charSetReverseMap[ichr]) % (idCharsetLen) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	checkstr := string(idCharset[checksum%idCharsetLen]) | ||||||
|  |  | ||||||
|  | 	return prefix + iddata[:(idlen-len(prefix)-checklen)] + checkstr | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func validateID(prefix string, value string) error { | ||||||
|  | 	if len(value) != idlen { | ||||||
|  | 		return exerr.New(exerr.TypeInvalidCSID, "id has the wrong length").Str("value", value).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if !strings.HasPrefix(value, prefix) { | ||||||
|  | 		return exerr.New(exerr.TypeInvalidCSID, "id is missing the correct prefix").Str("value", value).Str("prefix", prefix).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	checksum := 0 | ||||||
|  | 	for i := len(prefix); i < len(value)-checklen; i++ { | ||||||
|  | 		ichr := int(value[i]) | ||||||
|  | 		if ichr < 0 || ichr >= len(charSetReverseMap) || charSetReverseMap[ichr] == -1 { | ||||||
|  | 			return exerr.New(exerr.TypeInvalidCSID, "id contains invalid characters").Str("value", value).Build() | ||||||
|  | 		} | ||||||
|  | 		checksum = (checksum + charSetReverseMap[ichr]) % (idCharsetLen) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	checkstr := string(idCharset[checksum%idCharsetLen]) | ||||||
|  |  | ||||||
|  | 	if !strings.HasSuffix(value, checkstr) { | ||||||
|  | 		return exerr.New(exerr.TypeInvalidCSID, "id checkstring is invalid").Str("value", value).Str("checkstr", checkstr).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func getRawData(prefix string, value string) string { | ||||||
|  | 	if len(value) != idlen { | ||||||
|  | 		return "" | ||||||
|  | 	} | ||||||
|  | 	return value[len(prefix) : idlen-checklen] | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func getCheckString(prefix string, value string) string { | ||||||
|  | 	if len(value) != idlen { | ||||||
|  | 		return "" | ||||||
|  | 	} | ||||||
|  | 	return value[idlen-checklen:] | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ValidateEntityID(vfl validator.FieldLevel) bool { | ||||||
|  | 	if !vfl.Field().CanInterface() { | ||||||
|  | 		log.Error().Msgf("Failed to validate EntityID (cannot interface ?!?)") | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	ifvalue := vfl.Field().Interface() | ||||||
|  |  | ||||||
|  | 	if value1, ok := ifvalue.(EntityID); ok { | ||||||
|  |  | ||||||
|  | 		if vfl.Field().Type().Kind() == reflect.Pointer && langext.IsNil(value1) { | ||||||
|  | 			return true | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if err := value1.Valid(); err != nil { | ||||||
|  | 			log.Debug().Msgf("Failed to validate EntityID '%s' (%s)", value1.String(), err.Error()) | ||||||
|  | 			return false | ||||||
|  | 		} else { | ||||||
|  | 			return true | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  | 		log.Error().Msgf("Failed to validate EntityID (wrong type: %T)", ifvalue) | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{range .IDs}} | ||||||
|  |  | ||||||
|  | // ================================ {{.Name}} ({{.FileRelative}}) ================================ | ||||||
|  |  | ||||||
|  | func New{{.Name}}() {{.Name}} { | ||||||
|  | 	return {{.Name}}(generateID(prefix{{.Name}})) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (id {{.Name}}) Valid() error { | ||||||
|  | 	return validateID(prefix{{.Name}}, string(id)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) String() string { | ||||||
|  | 	return string(i) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) Prefix() string { | ||||||
|  | 	return prefix{{.Name}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (id {{.Name}}) Raw() string { | ||||||
|  | 	return getRawData(prefix{{.Name}}, string(id)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (id {{.Name}}) CheckString() string { | ||||||
|  | 	return getCheckString(prefix{{.Name}}, string(id)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (id {{.Name}}) Regex() rext.Regex { | ||||||
|  | 	return regex{{.Name}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{end}} | ||||||
							
								
								
									
										52
									
								
								bfcodegen/csid-generate_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										52
									
								
								bfcodegen/csid-generate_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,52 @@ | |||||||
|  | package bfcodegen | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	_ "embed" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"os" | ||||||
|  | 	"path/filepath" | ||||||
|  | 	"testing" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | //go:embed _test_example.tgz | ||||||
|  | var CSIDExampleModels []byte | ||||||
|  |  | ||||||
|  | func TestGenerateCSIDSpecs(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz") | ||||||
|  |  | ||||||
|  | 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | ||||||
|  |  | ||||||
|  | 	err := os.WriteFile(tmpFile, CSIDExampleModels, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	err = os.Mkdir(tmpDir, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go") | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go") | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/csid_gen.go"))(t))) | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | } | ||||||
| @@ -1,10 +1,12 @@ | |||||||
| package bfcodegen | package bfcodegen | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	_ "embed" | ||||||
| 	"errors" | 	"errors" | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  | 	"go/format" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext" | 	"gogs.mikescher.com/BlackForestBytes/goext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
| @@ -14,7 +16,7 @@ import ( | |||||||
| 	"path/filepath" | 	"path/filepath" | ||||||
| 	"regexp" | 	"regexp" | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"time" | 	"text/template" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type EnumDefVal struct { | type EnumDefVal struct { | ||||||
| @@ -35,10 +37,13 @@ var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_] | |||||||
|  |  | ||||||
| var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | ||||||
|  |  | ||||||
| var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:]+"|[0-9]+))\s*(//(?P<descr>.*))?.*$`)) | var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-]+"|[0-9]+))\s*(//(?P<descr>.*))?.*$`)) | ||||||
|  |  | ||||||
| var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | ||||||
|  |  | ||||||
|  | //go:embed enum-generate.template | ||||||
|  | var templateEnumGenerateText string | ||||||
|  |  | ||||||
| func GenerateEnumSpecs(sourceDir string, destFile string) error { | func GenerateEnumSpecs(sourceDir string, destFile string) error { | ||||||
|  |  | ||||||
| 	files, err := os.ReadDir(sourceDir) | 	files, err := os.ReadDir(sourceDir) | ||||||
| @@ -104,25 +109,16 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 		return errors.New("no package name found in any file") | 		return errors.New("no package name found in any file") | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	err = os.WriteFile(destFile, []byte(fmtEnumOutput(newChecksum, allEnums, pkgname)), 0o755) | 	fdata, err := format.Source([]byte(fmtEnumOutput(newChecksum, allEnums, pkgname))) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second)) | 	err = os.WriteFile(destFile, fdata, 0o755) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if res.CommandTimedOut { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt timed out") |  | ||||||
| 	} |  | ||||||
| 	if res.ExitCode != 0 { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt did not succeed") |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -204,133 +200,32 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | |||||||
| } | } | ||||||
|  |  | ||||||
| func fmtEnumOutput(cs string, enums []EnumDef, pkgname string) string { | func fmtEnumOutput(cs string, enums []EnumDef, pkgname string) string { | ||||||
| 	str := "// Code generated by enum-generate.go DO NOT EDIT.\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += "package " + pkgname + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	str += "import \"gogs.mikescher.com/BlackForestBytes/goext/langext\"" + "\n" | 	templ := template.New("enum-generate") | ||||||
| 	str += "import \"gogs.mikescher.com/BlackForestBytes/goext/enums\"" + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	str += "const ChecksumEnumGenerator = \"" + cs + "\" // GoExtVersion: " + goext.GoextVersion + "\n" | 	templ = templ.Funcs(template.FuncMap{ | ||||||
| 	str += "\n" | 		"boolToStr": func(b bool) string { return langext.Conditional(b, "true", "false") }, | ||||||
|  | 		"deref":     func(v *string) string { return *v }, | ||||||
|  | 		"trimSpace": func(str string) string { return strings.TrimSpace(str) }, | ||||||
|  | 		"hasStr":    func(v EnumDef) bool { return v.Type == "string" }, | ||||||
|  | 		"hasDescr": func(v EnumDef) bool { | ||||||
|  | 			return langext.ArrAll(v.Values, func(val EnumDefVal) bool { return val.Description != nil }) | ||||||
|  | 		}, | ||||||
|  | 	}) | ||||||
|  |  | ||||||
| 	for _, enumdef := range enums { | 	templ = template.Must(templ.Parse(templateEnumGenerateText)) | ||||||
|  |  | ||||||
| 		hasDescr := langext.ArrAll(enumdef.Values, func(val EnumDefVal) bool { return val.Description != nil }) | 	buffer := bytes.Buffer{} | ||||||
| 		hasStr := enumdef.Type == "string" |  | ||||||
|  |  | ||||||
| 		str += "// ================================ " + enumdef.EnumTypeName + " ================================" + "\n" | 	err := templ.Execute(&buffer, langext.H{ | ||||||
| 		str += "//" + "\n" | 		"PkgName":      pkgname, | ||||||
| 		str += "// File:       " + enumdef.FileRelative + "\n" | 		"Checksum":     cs, | ||||||
| 		str += "// StringEnum: " + langext.Conditional(hasStr, "true", "false") + "\n" | 		"GoextVersion": goext.GoextVersion, | ||||||
| 		str += "// DescrEnum:  " + langext.Conditional(hasDescr, "true", "false") + "\n" | 		"Enums":        enums, | ||||||
| 		str += "//" + "\n" | 	}) | ||||||
| 		str += "" + "\n" | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
| 		str += "var __" + enumdef.EnumTypeName + "Values = []" + enumdef.EnumTypeName + "{" + "\n" |  | ||||||
| 		for _, v := range enumdef.Values { |  | ||||||
| 			str += "    " + v.VarName + "," + "\n" |  | ||||||
| 		} |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		if hasDescr { |  | ||||||
| 			str += "var __" + enumdef.EnumTypeName + "Descriptions = map[" + enumdef.EnumTypeName + "]string{" + "\n" |  | ||||||
| 			for _, v := range enumdef.Values { |  | ||||||
| 				str += "    " + v.VarName + ": \"" + strings.TrimSpace(*v.Description) + "\"," + "\n" |  | ||||||
| 			} |  | ||||||
| 			str += "}" + "\n" |  | ||||||
| 			str += "" + "\n" |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 		str += "var __" + enumdef.EnumTypeName + "Varnames = map[" + enumdef.EnumTypeName + "]string{" + "\n" | 	return buffer.String() | ||||||
| 		for _, v := range enumdef.Values { |  | ||||||
| 			str += "    " + v.VarName + ": \"" + v.VarName + "\"," + "\n" |  | ||||||
| 		} |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") Valid() bool {" + "\n" |  | ||||||
| 		str += "    return langext.InArray(e, __" + enumdef.EnumTypeName + "Values)" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") Values() []" + enumdef.EnumTypeName + " {" + "\n" |  | ||||||
| 		str += "    return __" + enumdef.EnumTypeName + "Values" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") ValuesAny() []any {" + "\n" |  | ||||||
| 		str += "    return langext.ArrCastToAny(__" + enumdef.EnumTypeName + "Values)" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") ValuesMeta() []enums.EnumMetaValue {" + "\n" |  | ||||||
| 		str += "    return " + enumdef.EnumTypeName + "ValuesMeta()" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		if hasStr { |  | ||||||
| 			str += "func (e " + enumdef.EnumTypeName + ") String() string {" + "\n" |  | ||||||
| 			str += "    return string(e)" + "\n" |  | ||||||
| 			str += "}" + "\n" |  | ||||||
| 			str += "" + "\n" |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		if hasDescr { |  | ||||||
| 			str += "func (e " + enumdef.EnumTypeName + ") Description() string {" + "\n" |  | ||||||
| 			str += "    if d, ok := __" + enumdef.EnumTypeName + "Descriptions[e]; ok {" + "\n" |  | ||||||
| 			str += "        return d" + "\n" |  | ||||||
| 			str += "    }" + "\n" |  | ||||||
| 			str += "    return \"\"" + "\n" |  | ||||||
| 			str += "}" + "\n" |  | ||||||
| 			str += "" + "\n" |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") VarName() string {" + "\n" |  | ||||||
| 		str += "    if d, ok := __" + enumdef.EnumTypeName + "Varnames[e]; ok {" + "\n" |  | ||||||
| 		str += "        return d" + "\n" |  | ||||||
| 		str += "    }" + "\n" |  | ||||||
| 		str += "    return \"\"" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") Meta() enums.EnumMetaValue {" + "\n" |  | ||||||
| 		if hasDescr { |  | ||||||
| 			str += "    return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())}" |  | ||||||
| 		} else { |  | ||||||
| 			str += "    return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil}" |  | ||||||
| 		} |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func Parse" + enumdef.EnumTypeName + "(vv string) (" + enumdef.EnumTypeName + ", bool) {" + "\n" |  | ||||||
| 		str += "    for _, ev := range __" + enumdef.EnumTypeName + "Values {" + "\n" |  | ||||||
| 		str += "        if string(ev) == vv {" + "\n" |  | ||||||
| 		str += "            return ev, true" + "\n" |  | ||||||
| 		str += "        }" + "\n" |  | ||||||
| 		str += "    }" + "\n" |  | ||||||
| 		str += "    return \"\", false" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func " + enumdef.EnumTypeName + "Values() []" + enumdef.EnumTypeName + " {" + "\n" |  | ||||||
| 		str += "    return __" + enumdef.EnumTypeName + "Values" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func " + enumdef.EnumTypeName + "ValuesMeta() []enums.EnumMetaValue {" + "\n" |  | ||||||
| 		str += "    return []enums.EnumMetaValue{" + "\n" |  | ||||||
| 		for _, v := range enumdef.Values { |  | ||||||
| 			str += "        " + v.VarName + ".Meta(),\n" |  | ||||||
| 		} |  | ||||||
| 		str += "    }" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return str |  | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										97
									
								
								bfcodegen/enum-generate.template
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										97
									
								
								bfcodegen/enum-generate.template
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,97 @@ | |||||||
|  | // Code generated by enum-generate.go DO NOT EDIT. | ||||||
|  |  | ||||||
|  | package {{.PkgName}} | ||||||
|  |  | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/enums" | ||||||
|  |  | ||||||
|  | const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | ||||||
|  |  | ||||||
|  | {{range .Enums}} | ||||||
|  |  | ||||||
|  | {{ $hasStr   := ( . | hasStr   ) }} | ||||||
|  | {{ $hasDescr := ( . | hasDescr ) }} | ||||||
|  |  | ||||||
|  | // ================================ {{.EnumTypeName}} ================================ | ||||||
|  | // | ||||||
|  | // File:       {{.FileRelative}} | ||||||
|  | // StringEnum: {{$hasStr   | boolToStr}} | ||||||
|  | // DescrEnum:  {{$hasDescr | boolToStr}} | ||||||
|  | // | ||||||
|  |  | ||||||
|  | var __{{.EnumTypeName}}Values = []{{.EnumTypeName}}{ {{range .Values}} | ||||||
|  | 	{{.VarName}}, {{end}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{if $hasDescr}} | ||||||
|  | var __{{.EnumTypeName}}Descriptions = map[{{.EnumTypeName}}]string{ {{range .Values}} | ||||||
|  | 	{{.VarName}}: "{{.Description | deref | trimSpace}}", {{end}} | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | var __{{.EnumTypeName}}Varnames = map[{{.EnumTypeName}}]string{ {{range .Values}} | ||||||
|  | 	{{.VarName}}: "{{.VarName}}", {{end}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) Valid() bool { | ||||||
|  | 	return langext.InArray(e, __{{.EnumTypeName}}Values) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) Values() []{{.EnumTypeName}} { | ||||||
|  | 	return __{{.EnumTypeName}}Values | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) ValuesAny() []any { | ||||||
|  | 	return langext.ArrCastToAny(__{{.EnumTypeName}}Values) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) ValuesMeta() []enums.EnumMetaValue { | ||||||
|  | 	return {{.EnumTypeName}}ValuesMeta() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{if $hasStr}} | ||||||
|  | func (e {{.EnumTypeName}}) String() string { | ||||||
|  | 	return string(e) | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | {{if $hasDescr}} | ||||||
|  | func (e {{.EnumTypeName}}) Description() string { | ||||||
|  | 	if d, ok := __{{.EnumTypeName}}Descriptions[e]; ok { | ||||||
|  | 		return d | ||||||
|  | 	} | ||||||
|  | 	return "" | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) VarName() string { | ||||||
|  | 	if d, ok := __{{.EnumTypeName}}Varnames[e]; ok { | ||||||
|  | 		return d | ||||||
|  | 	} | ||||||
|  | 	return "" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue { | ||||||
|  |     {{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func Parse{{.EnumTypeName}}(vv string) ({{.EnumTypeName}}, bool) { | ||||||
|  | 	for _, ev := range __{{.EnumTypeName}}Values { | ||||||
|  | 		if string(ev) == vv { | ||||||
|  | 			return ev, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return "", false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func {{.EnumTypeName}}Values() []{{.EnumTypeName}} { | ||||||
|  | 	return __{{.EnumTypeName}}Values | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func {{.EnumTypeName}}ValuesMeta() []enums.EnumMetaValue { | ||||||
|  | 	return []enums.EnumMetaValue{ {{range .Values}} | ||||||
|  |             {{.VarName}}.Meta(), {{end}} | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{end}} | ||||||
| @@ -2,6 +2,7 @@ package bfcodegen | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	_ "embed" | 	_ "embed" | ||||||
|  | 	"fmt" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" | 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
| @@ -12,7 +13,7 @@ import ( | |||||||
| ) | ) | ||||||
|  |  | ||||||
| //go:embed _test_example.tgz | //go:embed _test_example.tgz | ||||||
| var ExampleModels []byte | var EnumExampleModels []byte | ||||||
|  |  | ||||||
| func TestGenerateEnumSpecs(t *testing.T) { | func TestGenerateEnumSpecs(t *testing.T) { | ||||||
|  |  | ||||||
| @@ -20,7 +21,7 @@ func TestGenerateEnumSpecs(t *testing.T) { | |||||||
|  |  | ||||||
| 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | ||||||
|  |  | ||||||
| 	err := os.WriteFile(tmpFile, ExampleModels, 0o777) | 	err := os.WriteFile(tmpFile, EnumExampleModels, 0o777) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | ||||||
| @@ -39,4 +40,13 @@ func TestGenerateEnumSpecs(t *testing.T) { | |||||||
| 	err = GenerateEnumSpecs(tmpDir, tmpDir+"/enums_gen.go") | 	err = GenerateEnumSpecs(tmpDir, tmpDir+"/enums_gen.go") | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/enums_gen.go"))(t))) | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,10 +1,12 @@ | |||||||
| package bfcodegen | package bfcodegen | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	_ "embed" | ||||||
| 	"errors" | 	"errors" | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  | 	"go/format" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext" | 	"gogs.mikescher.com/BlackForestBytes/goext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
| @@ -14,7 +16,7 @@ import ( | |||||||
| 	"path/filepath" | 	"path/filepath" | ||||||
| 	"regexp" | 	"regexp" | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"time" | 	"text/template" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type IDDef struct { | type IDDef struct { | ||||||
| @@ -29,6 +31,9 @@ var rexIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+s | |||||||
|  |  | ||||||
| var rexIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | var rexIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | ||||||
|  |  | ||||||
|  | //go:embed id-generate.template | ||||||
|  | var templateIDGenerateText string | ||||||
|  |  | ||||||
| func GenerateIDSpecs(sourceDir string, destFile string) error { | func GenerateIDSpecs(sourceDir string, destFile string) error { | ||||||
|  |  | ||||||
| 	files, err := os.ReadDir(sourceDir) | 	files, err := os.ReadDir(sourceDir) | ||||||
| @@ -94,25 +99,16 @@ func GenerateIDSpecs(sourceDir string, destFile string) error { | |||||||
| 		return errors.New("no package name found in any file") | 		return errors.New("no package name found in any file") | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	err = os.WriteFile(destFile, []byte(fmtIDOutput(newChecksum, allIDs, pkgname)), 0o755) | 	fdata, err := format.Source([]byte(fmtIDOutput(newChecksum, allIDs, pkgname))) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second)) | 	err = os.WriteFile(destFile, fdata, 0o755) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if res.CommandTimedOut { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt timed out") |  | ||||||
| 	} |  | ||||||
| 	if res.ExitCode != 0 { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt did not succeed") |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -166,71 +162,22 @@ func processIDFile(basedir string, fn string) ([]IDDef, string, error) { | |||||||
| } | } | ||||||
|  |  | ||||||
| func fmtIDOutput(cs string, ids []IDDef, pkgname string) string { | func fmtIDOutput(cs string, ids []IDDef, pkgname string) string { | ||||||
| 	str := "// Code generated by id-generate.go DO NOT EDIT.\n" | 	templ := template.Must(template.New("id-generate").Parse(templateIDGenerateText)) | ||||||
| 	str += "\n" |  | ||||||
| 	str += "package " + pkgname + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	str += "import \"go.mongodb.org/mongo-driver/bson\"" + "\n" | 	buffer := bytes.Buffer{} | ||||||
| 	str += "import \"go.mongodb.org/mongo-driver/bson/bsontype\"" + "\n" |  | ||||||
| 	str += "import \"go.mongodb.org/mongo-driver/bson/primitive\"" + "\n" |  | ||||||
| 	str += "import \"gogs.mikescher.com/BlackForestBytes/goext/exerr\"" + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	str += "const ChecksumIDGenerator = \"" + cs + "\" // GoExtVersion: " + goext.GoextVersion + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	anyDef := langext.ArrFirstOrNil(ids, func(def IDDef) bool { return def.Name == "AnyID" || def.Name == "AnyId" }) | 	anyDef := langext.ArrFirstOrNil(ids, func(def IDDef) bool { return def.Name == "AnyID" || def.Name == "AnyId" }) | ||||||
|  |  | ||||||
| 	for _, iddef := range ids { | 	err := templ.Execute(&buffer, langext.H{ | ||||||
|  | 		"PkgName":      pkgname, | ||||||
| 		str += "// ================================ " + iddef.Name + " (" + iddef.FileRelative + ") ================================" + "\n" | 		"Checksum":     cs, | ||||||
| 		str += "" + "\n" | 		"GoextVersion": goext.GoextVersion, | ||||||
|  | 		"IDs":          ids, | ||||||
| 		str += "func (i " + iddef.Name + ") MarshalBSONValue() (bsontype.Type, []byte, error) {" + "\n" | 		"AnyDef":       anyDef, | ||||||
| 		str += "	if objId, err := primitive.ObjectIDFromHex(string(i)); err == nil {" + "\n" | 	}) | ||||||
| 		str += "		return bson.MarshalValue(objId)" + "\n" | 	if err != nil { | ||||||
| 		str += "	} else {" + "\n" | 		panic(err) | ||||||
| 		str += "		return 0, nil, exerr.New(exerr.TypeMarshalEntityID, \"Failed to marshal " + iddef.Name + "(\"+i.String()+\") to ObjectId\").Str(\"value\", string(i)).Type(\"type\", i).Build()" + "\n" |  | ||||||
| 		str += "	}" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (i " + iddef.Name + ") String() string {" + "\n" |  | ||||||
| 		str += "	return string(i)" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (i " + iddef.Name + ") ObjID() (primitive.ObjectID, error) {" + "\n" |  | ||||||
| 		str += "	return primitive.ObjectIDFromHex(string(i))" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (i " + iddef.Name + ") Valid() bool {" + "\n" |  | ||||||
| 		str += "	_, err := primitive.ObjectIDFromHex(string(i))" + "\n" |  | ||||||
| 		str += "	return err == nil" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		if anyDef != nil { |  | ||||||
| 			str += "func (i " + iddef.Name + ") AsAny() " + anyDef.Name + " {" + "\n" |  | ||||||
| 			str += "	return " + anyDef.Name + "(i)" + "\n" |  | ||||||
| 			str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 			str += "" + "\n" |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 		str += "func New" + iddef.Name + "() " + iddef.Name + " {" + "\n" | 	return buffer.String() | ||||||
| 		str += "	return " + iddef.Name + "(primitive.NewObjectID().Hex())" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return str |  | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										47
									
								
								bfcodegen/id-generate.template
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										47
									
								
								bfcodegen/id-generate.template
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,47 @@ | |||||||
|  | // Code generated by id-generate.go DO NOT EDIT. | ||||||
|  |  | ||||||
|  | package {{.PkgName}} | ||||||
|  |  | ||||||
|  | import "go.mongodb.org/mongo-driver/bson" | ||||||
|  | import "go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | import "go.mongodb.org/mongo-driver/bson/primitive" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  |  | ||||||
|  | const ChecksumIDGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | ||||||
|  |  | ||||||
|  | {{range .IDs}} | ||||||
|  |  | ||||||
|  | // ================================ {{.Name}} ({{.FileRelative}}) ================================ | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	if objId, err := primitive.ObjectIDFromHex(string(i)); err == nil { | ||||||
|  | 		return bson.MarshalValue(objId) | ||||||
|  | 	} else { | ||||||
|  | 		return 0, nil, exerr.New(exerr.TypeMarshalEntityID, "Failed to marshal {{.Name}}("+i.String()+") to ObjectId").Str("value", string(i)).Type("type", i).Build() | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) String() string { | ||||||
|  | 	return string(i) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) ObjID() (primitive.ObjectID, error) { | ||||||
|  | 	return primitive.ObjectIDFromHex(string(i)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) Valid() bool { | ||||||
|  | 	_, err := primitive.ObjectIDFromHex(string(i)) | ||||||
|  | 	return err == nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{if ne $.AnyDef nil}} | ||||||
|  | func (i {{.Name}}) AsAny() {{$.AnyDef.Name}} { | ||||||
|  | 	return {{$.AnyDef.Name}}(i) | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | func New{{.Name}}() {{.Name}} { | ||||||
|  | 	return {{.Name}}(primitive.NewObjectID().Hex()) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{end}} | ||||||
							
								
								
									
										52
									
								
								bfcodegen/id-generate_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										52
									
								
								bfcodegen/id-generate_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,52 @@ | |||||||
|  | package bfcodegen | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	_ "embed" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"os" | ||||||
|  | 	"path/filepath" | ||||||
|  | 	"testing" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | //go:embed _test_example.tgz | ||||||
|  | var IDExampleModels []byte | ||||||
|  |  | ||||||
|  | func TestGenerateIDSpecs(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz") | ||||||
|  |  | ||||||
|  | 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | ||||||
|  |  | ||||||
|  | 	err := os.WriteFile(tmpFile, IDExampleModels, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	err = os.Mkdir(tmpDir, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go") | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go") | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/id_gen.go"))(t))) | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | } | ||||||
| @@ -41,13 +41,13 @@ func processEnvOverrides(rval reflect.Value, delim string, prefix string) error | |||||||
| 			continue | 			continue | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		if rvfield.Kind() == reflect.Struct { |  | ||||||
|  |  | ||||||
| 		envkey, found := rsfield.Tag.Lookup("env") | 		envkey, found := rsfield.Tag.Lookup("env") | ||||||
| 		if !found || envkey == "-" { | 		if !found || envkey == "-" { | ||||||
| 			continue | 			continue | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|  | 		if rvfield.Kind() == reflect.Struct && rvfield.Type() != reflect.TypeOf(time.UnixMilli(0)) { | ||||||
|  |  | ||||||
| 			subPrefix := prefix | 			subPrefix := prefix | ||||||
| 			if envkey != "" { | 			if envkey != "" { | ||||||
| 				subPrefix = subPrefix + envkey + delim | 				subPrefix = subPrefix + envkey + delim | ||||||
| @@ -57,10 +57,7 @@ func processEnvOverrides(rval reflect.Value, delim string, prefix string) error | |||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 				return err | 				return err | ||||||
| 			} | 			} | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		envkey := rsfield.Tag.Get("env") |  | ||||||
| 		if envkey == "" || envkey == "-" { |  | ||||||
| 			continue | 			continue | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|   | |||||||
| @@ -66,7 +66,6 @@ func (ph PassHash) Data() (_version int, _seed []byte, _payload []byte, _totp bo | |||||||
| 		return int(version), nil, payload, false, nil, true | 		return int(version), nil, payload, false, nil, true | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	// |  | ||||||
| 	if version == 2 { | 	if version == 2 { | ||||||
| 		if len(split) != 3 { | 		if len(split) != 3 { | ||||||
| 			return -1, nil, nil, false, nil, false | 			return -1, nil, nil, false, nil, false | ||||||
|   | |||||||
| @@ -4,6 +4,10 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | type RawFilter interface { | ||||||
|  | 	FilterQuery() mongo.Pipeline | ||||||
|  | } | ||||||
|  |  | ||||||
| type Filter interface { | type Filter interface { | ||||||
| 	FilterQuery() mongo.Pipeline | 	FilterQuery() mongo.Pipeline | ||||||
| 	Pagination() (string, SortDirection, string, SortDirection) | 	Pagination() (string, SortDirection, string, SortDirection) | ||||||
|   | |||||||
| @@ -70,6 +70,7 @@ func init() { | |||||||
| type Builder struct { | type Builder struct { | ||||||
| 	errorData       *ExErr | 	errorData       *ExErr | ||||||
| 	containsGinData bool | 	containsGinData bool | ||||||
|  | 	noLog           bool | ||||||
| } | } | ||||||
|  |  | ||||||
| func Get(err error) *Builder { | func Get(err error) *Builder { | ||||||
| @@ -190,6 +191,13 @@ func (b *Builder) System() *Builder { | |||||||
|  |  | ||||||
| // ---------------------------------------------------------------------------- | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
|  | func (b *Builder) NoLog() *Builder { | ||||||
|  | 	b.noLog = true | ||||||
|  | 	return b | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
| func (b *Builder) Id(key string, val fmt.Stringer) *Builder { | func (b *Builder) Id(key string, val fmt.Stringer) *Builder { | ||||||
| 	return b.addMeta(key, MDTID, newIDWrap(val)) | 	return b.addMeta(key, MDTID, newIDWrap(val)) | ||||||
| } | } | ||||||
| @@ -275,7 +283,7 @@ func (b *Builder) Any(key string, val any) *Builder { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (b *Builder) Stringer(key string, val fmt.Stringer) *Builder { | func (b *Builder) Stringer(key string, val fmt.Stringer) *Builder { | ||||||
| 	if val == nil { | 	if langext.IsNil(val) { | ||||||
| 		return b.addMeta(key, MDTString, "(!nil)") | 		return b.addMeta(key, MDTString, "(!nil)") | ||||||
| 	} else { | 	} else { | ||||||
| 		return b.addMeta(key, MDTString, val.String()) | 		return b.addMeta(key, MDTString, val.String()) | ||||||
| @@ -401,12 +409,14 @@ func extractHeader(header map[string][]string) []string { | |||||||
|  |  | ||||||
| // Build creates a new error, ready to pass up the stack | // Build creates a new error, ready to pass up the stack | ||||||
| // If the errors is not SevWarn or SevInfo it gets also logged (in short form, without stacktrace) onto stdout | // If the errors is not SevWarn or SevInfo it gets also logged (in short form, without stacktrace) onto stdout | ||||||
|  | // Can be gloablly configured with ZeroLogErrTraces and ZeroLogAllTraces | ||||||
|  | // Can be locally suppressed with Builder.NoLog() | ||||||
| func (b *Builder) Build() error { | func (b *Builder) Build() error { | ||||||
| 	warnOnPkgConfigNotInitialized() | 	warnOnPkgConfigNotInitialized() | ||||||
|  |  | ||||||
| 	if pkgconfig.ZeroLogErrTraces && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) { | 	if pkgconfig.ZeroLogErrTraces && !b.noLog && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) { | ||||||
| 		b.errorData.ShortLog(stackSkipLogger.Error()) | 		b.errorData.ShortLog(stackSkipLogger.Error()) | ||||||
| 	} else if pkgconfig.ZeroLogAllTraces { | 	} else if pkgconfig.ZeroLogAllTraces && !b.noLog { | ||||||
| 		b.errorData.ShortLog(stackSkipLogger.Error()) | 		b.errorData.ShortLog(stackSkipLogger.Error()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|   | |||||||
| @@ -46,6 +46,7 @@ var ( | |||||||
| 	TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500)) | 	TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500)) | ||||||
| 	TypeMongoFilter       = NewType("MONGO_FILTER", langext.Ptr(500)) | 	TypeMongoFilter       = NewType("MONGO_FILTER", langext.Ptr(500)) | ||||||
| 	TypeMongoReflection   = NewType("MONGO_REFLECTION", langext.Ptr(500)) | 	TypeMongoReflection   = NewType("MONGO_REFLECTION", langext.Ptr(500)) | ||||||
|  | 	TypeMongoInvalidOpt   = NewType("MONGO_INVALIDOPT", langext.Ptr(500)) | ||||||
|  |  | ||||||
| 	TypeWrap = NewType("Wrap", nil) | 	TypeWrap = NewType("Wrap", nil) | ||||||
|  |  | ||||||
| @@ -56,6 +57,7 @@ var ( | |||||||
| 	TypeBindFailHeader   = NewType("BINDFAIL_HEADER", langext.Ptr(400)) | 	TypeBindFailHeader   = NewType("BINDFAIL_HEADER", langext.Ptr(400)) | ||||||
|  |  | ||||||
| 	TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400)) | 	TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400)) | ||||||
|  | 	TypeInvalidCSID     = NewType("INVALID_CSID", langext.Ptr(400)) | ||||||
|  |  | ||||||
| 	TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401)) | 	TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401)) | ||||||
| 	TypeAuthFailed   = NewType("AUTH_FAILED", langext.Ptr(401)) | 	TypeAuthFailed   = NewType("AUTH_FAILED", langext.Ptr(401)) | ||||||
|   | |||||||
| @@ -68,6 +68,10 @@ func Init(cfg ErrorPackageConfigInit) { | |||||||
| 	initialized = true | 	initialized = true | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func Initialized() bool { | ||||||
|  | 	return initialized | ||||||
|  | } | ||||||
|  |  | ||||||
| func warnOnPkgConfigNotInitialized() { | func warnOnPkgConfigNotInitialized() { | ||||||
| 	if !initialized { | 	if !initialized { | ||||||
| 		fmt.Printf("\n") | 		fmt.Printf("\n") | ||||||
|   | |||||||
| @@ -30,7 +30,7 @@ type ExErr struct { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (ee *ExErr) Error() string { | func (ee *ExErr) Error() string { | ||||||
| 	return ee.Message | 	return ee.RecursiveMessage() | ||||||
| } | } | ||||||
|  |  | ||||||
| // Unwrap must be implemented so that some error.XXX methods work | // Unwrap must be implemented so that some error.XXX methods work | ||||||
| @@ -169,14 +169,32 @@ func (ee *ExErr) ShortLog(evt *zerolog.Event) { | |||||||
|  |  | ||||||
| // RecursiveMessage returns the message to show | // RecursiveMessage returns the message to show | ||||||
| // = first error (top-down) that is not wrapping/foreign/empty | // = first error (top-down) that is not wrapping/foreign/empty | ||||||
|  | // = lowest level error (that is not empty) | ||||||
|  | // = fallback to self.message | ||||||
| func (ee *ExErr) RecursiveMessage() string { | func (ee *ExErr) RecursiveMessage() string { | ||||||
|  |  | ||||||
|  | 	// ==== [1] ==== first error (top-down) that is not wrapping/foreign/empty | ||||||
|  |  | ||||||
| 	for curr := ee; curr != nil; curr = curr.OriginalError { | 	for curr := ee; curr != nil; curr = curr.OriginalError { | ||||||
| 		if curr.Message != "" && curr.Category != CatWrap && curr.Category != CatForeign { | 		if curr.Message != "" && curr.Category != CatWrap && curr.Category != CatForeign { | ||||||
| 			return curr.Message | 			return curr.Message | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	// fallback to self | 	// ==== [2] ==== lowest level error (that is not empty) | ||||||
|  |  | ||||||
|  | 	deepestMsg := "" | ||||||
|  | 	for curr := ee; curr != nil; curr = curr.OriginalError { | ||||||
|  | 		if curr.Message != "" { | ||||||
|  | 			deepestMsg = curr.Message | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	if deepestMsg != "" { | ||||||
|  | 		return deepestMsg | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	// ==== [3] ==== fallback to self.message | ||||||
|  |  | ||||||
| 	return ee.Message | 	return ee.Message | ||||||
| } | } | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										36
									
								
								fsext/exists.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										36
									
								
								fsext/exists.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,36 @@ | |||||||
|  | package fsext | ||||||
|  |  | ||||||
|  | import "os" | ||||||
|  |  | ||||||
|  | func PathExists(fp string) (bool, error) { | ||||||
|  | 	_, err := os.Stat(fp) | ||||||
|  | 	if err == nil { | ||||||
|  | 		return true, nil | ||||||
|  | 	} | ||||||
|  | 	if os.IsNotExist(err) { | ||||||
|  | 		return false, nil | ||||||
|  | 	} | ||||||
|  | 	return false, err | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func FileExists(fp string) (bool, error) { | ||||||
|  | 	stat, err := os.Stat(fp) | ||||||
|  | 	if err == nil { | ||||||
|  | 		return !stat.IsDir(), nil | ||||||
|  | 	} | ||||||
|  | 	if os.IsNotExist(err) { | ||||||
|  | 		return false, nil | ||||||
|  | 	} | ||||||
|  | 	return false, err | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func DirectoryExists(fp string) (bool, error) { | ||||||
|  | 	stat, err := os.Stat(fp) | ||||||
|  | 	if err == nil { | ||||||
|  | 		return stat.IsDir(), nil | ||||||
|  | 	} | ||||||
|  | 	if os.IsNotExist(err) { | ||||||
|  | 		return false, nil | ||||||
|  | 	} | ||||||
|  | 	return false, err | ||||||
|  | } | ||||||
| @@ -1,12 +1,15 @@ | |||||||
| package ginext | package ginext | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"bytes" | ||||||
| 	"context" | 	"context" | ||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
| 	"github.com/gin-gonic/gin/binding" | 	"github.com/gin-gonic/gin/binding" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/dataext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"io" | ||||||
| 	"runtime/debug" | 	"runtime/debug" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -17,6 +20,7 @@ type PreContext struct { | |||||||
| 	uri     any | 	uri     any | ||||||
| 	query   any | 	query   any | ||||||
| 	body    any | 	body    any | ||||||
|  | 	rawbody *[]byte | ||||||
| 	form    any | 	form    any | ||||||
| 	header  any | 	header  any | ||||||
| 	timeout *time.Duration | 	timeout *time.Duration | ||||||
| @@ -37,6 +41,11 @@ func (pctx *PreContext) Body(body any) *PreContext { | |||||||
| 	return pctx | 	return pctx | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (pctx *PreContext) RawBody(rawbody *[]byte) *PreContext { | ||||||
|  | 	pctx.rawbody = rawbody | ||||||
|  | 	return pctx | ||||||
|  | } | ||||||
|  |  | ||||||
| func (pctx *PreContext) Form(form any) *PreContext { | func (pctx *PreContext) Form(form any) *PreContext { | ||||||
| 	pctx.form = form | 	pctx.form = form | ||||||
| 	return pctx | 	return pctx | ||||||
| @@ -90,6 +99,23 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | |||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	if pctx.rawbody != nil { | ||||||
|  | 		if brc, ok := pctx.ginCtx.Request.Body.(dataext.BufferedReadCloser); ok { | ||||||
|  | 			v, err := brc.BufferedAll() | ||||||
|  | 			if err != nil { | ||||||
|  | 				return nil, nil, langext.Ptr(Error(err)) | ||||||
|  | 			} | ||||||
|  | 			*pctx.rawbody = v | ||||||
|  | 		} else { | ||||||
|  | 			buf := &bytes.Buffer{} | ||||||
|  | 			_, err := io.Copy(buf, pctx.ginCtx.Request.Body) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return nil, nil, langext.Ptr(Error(err)) | ||||||
|  | 			} | ||||||
|  | 			*pctx.rawbody = buf.Bytes() | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	if pctx.form != nil { | 	if pctx.form != nil { | ||||||
| 		if pctx.ginCtx.ContentType() == "multipart/form-data" { | 		if pctx.ginCtx.ContentType() == "multipart/form-data" { | ||||||
| 			if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil { | 			if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil { | ||||||
|   | |||||||
							
								
								
									
										30
									
								
								go.mod
									
									
									
									
									
								
							
							
						
						
									
										30
									
								
								go.mod
									
									
									
									
									
								
							| @@ -1,35 +1,35 @@ | |||||||
| module gogs.mikescher.com/BlackForestBytes/goext | module gogs.mikescher.com/BlackForestBytes/goext | ||||||
|  |  | ||||||
| go 1.19 | go 1.21 | ||||||
|  |  | ||||||
| require ( | require ( | ||||||
| 	github.com/gin-gonic/gin v1.9.1 | 	github.com/gin-gonic/gin v1.9.1 | ||||||
| 	github.com/jmoiron/sqlx v1.3.5 | 	github.com/jmoiron/sqlx v1.3.5 | ||||||
| 	github.com/rs/xid v1.5.0 | 	github.com/rs/xid v1.5.0 | ||||||
| 	github.com/rs/zerolog v1.31.0 | 	github.com/rs/zerolog v1.31.0 | ||||||
| 	go.mongodb.org/mongo-driver v1.12.1 | 	go.mongodb.org/mongo-driver v1.13.0 | ||||||
| 	golang.org/x/crypto v0.14.0 | 	golang.org/x/crypto v0.15.0 | ||||||
| 	golang.org/x/sys v0.13.0 | 	golang.org/x/sys v0.14.0 | ||||||
| 	golang.org/x/term v0.13.0 | 	golang.org/x/term v0.14.0 | ||||||
| ) | ) | ||||||
|  |  | ||||||
| require ( | require ( | ||||||
| 	github.com/bytedance/sonic v1.10.2 // indirect | 	github.com/bytedance/sonic v1.10.2 // indirect | ||||||
| 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect | 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect | ||||||
| 	github.com/chenzhuoyu/iasm v0.9.0 // indirect | 	github.com/chenzhuoyu/iasm v0.9.1 // indirect | ||||||
| 	github.com/gabriel-vasile/mimetype v1.4.2 // indirect | 	github.com/gabriel-vasile/mimetype v1.4.3 // indirect | ||||||
| 	github.com/gin-contrib/sse v0.1.0 // indirect | 	github.com/gin-contrib/sse v0.1.0 // indirect | ||||||
| 	github.com/go-playground/locales v0.14.1 // indirect | 	github.com/go-playground/locales v0.14.1 // indirect | ||||||
| 	github.com/go-playground/universal-translator v0.18.1 // indirect | 	github.com/go-playground/universal-translator v0.18.1 // indirect | ||||||
| 	github.com/go-playground/validator/v10 v10.15.5 // indirect | 	github.com/go-playground/validator/v10 v10.16.0 // indirect | ||||||
| 	github.com/goccy/go-json v0.10.2 // indirect | 	github.com/goccy/go-json v0.10.2 // indirect | ||||||
| 	github.com/golang/snappy v0.0.4 // indirect | 	github.com/golang/snappy v0.0.4 // indirect | ||||||
| 	github.com/json-iterator/go v1.1.12 // indirect | 	github.com/json-iterator/go v1.1.12 // indirect | ||||||
| 	github.com/klauspost/compress v1.17.0 // indirect | 	github.com/klauspost/compress v1.17.3 // indirect | ||||||
| 	github.com/klauspost/cpuid/v2 v2.2.5 // indirect | 	github.com/klauspost/cpuid/v2 v2.2.6 // indirect | ||||||
| 	github.com/leodido/go-urn v1.2.4 // indirect | 	github.com/leodido/go-urn v1.2.4 // indirect | ||||||
| 	github.com/mattn/go-colorable v0.1.13 // indirect | 	github.com/mattn/go-colorable v0.1.13 // indirect | ||||||
| 	github.com/mattn/go-isatty v0.0.19 // indirect | 	github.com/mattn/go-isatty v0.0.20 // indirect | ||||||
| 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect | 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect | ||||||
| 	github.com/modern-go/reflect2 v1.0.2 // indirect | 	github.com/modern-go/reflect2 v1.0.2 // indirect | ||||||
| 	github.com/montanaflynn/stats v0.7.1 // indirect | 	github.com/montanaflynn/stats v0.7.1 // indirect | ||||||
| @@ -40,10 +40,10 @@ require ( | |||||||
| 	github.com/xdg-go/scram v1.1.2 // indirect | 	github.com/xdg-go/scram v1.1.2 // indirect | ||||||
| 	github.com/xdg-go/stringprep v1.0.4 // indirect | 	github.com/xdg-go/stringprep v1.0.4 // indirect | ||||||
| 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect | 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect | ||||||
| 	golang.org/x/arch v0.5.0 // indirect | 	golang.org/x/arch v0.6.0 // indirect | ||||||
| 	golang.org/x/net v0.16.0 // indirect | 	golang.org/x/net v0.18.0 // indirect | ||||||
| 	golang.org/x/sync v0.4.0 // indirect | 	golang.org/x/sync v0.5.0 // indirect | ||||||
| 	golang.org/x/text v0.13.0 // indirect | 	golang.org/x/text v0.14.0 // indirect | ||||||
| 	google.golang.org/protobuf v1.31.0 // indirect | 	google.golang.org/protobuf v1.31.0 // indirect | ||||||
| 	gopkg.in/yaml.v3 v3.0.1 // indirect | 	gopkg.in/yaml.v3 v3.0.1 // indirect | ||||||
| ) | ) | ||||||
|   | |||||||
							
								
								
									
										34
									
								
								go.sum
									
									
									
									
									
								
							
							
						
						
									
										34
									
								
								go.sum
									
									
									
									
									
								
							| @@ -8,12 +8,16 @@ github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ | |||||||
| github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA= | github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA= | ||||||
| github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo= | github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo= | ||||||
| github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | ||||||
|  | github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0= | ||||||
|  | github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | ||||||
| github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= | github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= | ||||||
| github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | ||||||
| github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= | github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= | ||||||
| github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | ||||||
| github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= | github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= | ||||||
| github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= | github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= | ||||||
|  | github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= | ||||||
|  | github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= | ||||||
| github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= | github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= | ||||||
| github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= | github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= | ||||||
| github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= | github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= | ||||||
| @@ -25,6 +29,8 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn | |||||||
| github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= | github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= | ||||||
| github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24= | github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24= | ||||||
| github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||||
|  | github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE= | ||||||
|  | github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||||
| github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | ||||||
| github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | ||||||
| github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | ||||||
| @@ -45,9 +51,17 @@ github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHm | |||||||
| github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= | github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= | ||||||
| github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM= | github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM= | ||||||
| github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= | github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= | ||||||
|  | github.com/klauspost/compress v1.17.1 h1:NE3C767s2ak2bweCZo3+rdP4U/HoyVXLv/X9f2gPS5g= | ||||||
|  | github.com/klauspost/compress v1.17.1/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= | ||||||
|  | github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4= | ||||||
|  | github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= | ||||||
|  | github.com/klauspost/compress v1.17.3 h1:qkRjuerhUU1EmXLYGkSH6EZL+vPSxIrYjLNAK4slzwA= | ||||||
|  | github.com/klauspost/compress v1.17.3/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | ||||||
| github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= | github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||||
|  | github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= | ||||||
|  | github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||||
| github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= | github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= | ||||||
| github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= | github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= | ||||||
| github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= | github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= | ||||||
| @@ -58,6 +72,8 @@ github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovk | |||||||
| github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= | github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= | ||||||
| github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= | github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= | ||||||
| github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= | github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= | ||||||
|  | github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= | ||||||
|  | github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= | ||||||
| github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg= | github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg= | ||||||
| github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= | github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= | ||||||
| github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | ||||||
| @@ -104,15 +120,21 @@ github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/ | |||||||
| github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | ||||||
| go.mongodb.org/mongo-driver v1.12.1 h1:nLkghSU8fQNaK7oUmDhQFsnrtcoNy7Z6LVFKsEecqgE= | go.mongodb.org/mongo-driver v1.12.1 h1:nLkghSU8fQNaK7oUmDhQFsnrtcoNy7Z6LVFKsEecqgE= | ||||||
| go.mongodb.org/mongo-driver v1.12.1/go.mod h1:/rGBTebI3XYboVmgz+Wv3Bcbl3aD0QF9zl6kDDw18rQ= | go.mongodb.org/mongo-driver v1.12.1/go.mod h1:/rGBTebI3XYboVmgz+Wv3Bcbl3aD0QF9zl6kDDw18rQ= | ||||||
|  | go.mongodb.org/mongo-driver v1.13.0 h1:67DgFFjYOCMWdtTEmKFpV3ffWlFnh+CYZ8ZS/tXWUfY= | ||||||
|  | go.mongodb.org/mongo-driver v1.13.0/go.mod h1:/rGBTebI3XYboVmgz+Wv3Bcbl3aD0QF9zl6kDDw18rQ= | ||||||
| golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | ||||||
| golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y= | golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y= | ||||||
| golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | ||||||
|  | golang.org/x/arch v0.6.0 h1:S0JTfE48HbRj80+4tbvZDYsJ3tGv6BUU3XxyZ7CirAc= | ||||||
|  | golang.org/x/arch v0.6.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||||
| golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | ||||||
| golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | ||||||
| golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= | golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= | ||||||
| golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= | golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= | ||||||
| golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= | golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= | ||||||
| golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= | golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= | ||||||
|  | golang.org/x/crypto v0.15.0 h1:frVn1TEaCEaZcn3Tmd7Y2b5KKPaZ+I32Q2OA3kYp5TA= | ||||||
|  | golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g= | ||||||
| golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | ||||||
| golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | ||||||
| golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | ||||||
| @@ -121,10 +143,16 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx | |||||||
| golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= | golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= | ||||||
| golang.org/x/net v0.16.0 h1:7eBu7KsSvFDtSXUIDbh3aqlK4DPsZ1rByC8PFfBThos= | golang.org/x/net v0.16.0 h1:7eBu7KsSvFDtSXUIDbh3aqlK4DPsZ1rByC8PFfBThos= | ||||||
| golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= | golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= | ||||||
|  | golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= | ||||||
|  | golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= | ||||||
|  | golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg= | ||||||
|  | golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ= | ||||||
| golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||||
| golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||||
| golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= | golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= | ||||||
| golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= | golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= | ||||||
|  | golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= | ||||||
|  | golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | ||||||
| golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | ||||||
| golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||||
| golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||||
| @@ -138,10 +166,14 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | |||||||
| golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||||
| golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= | golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= | ||||||
| golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||||
|  | golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= | ||||||
|  | golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
| golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | ||||||
| golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | ||||||
| golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= | golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= | ||||||
| golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= | golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= | ||||||
|  | golang.org/x/term v0.14.0 h1:LGK9IlZ8T9jvdy6cTdfKUCltatMFOehAQo9SRC46UQ8= | ||||||
|  | golang.org/x/term v0.14.0/go.mod h1:TySc+nGkYR6qt8km8wUhuFRTVSMIX3XPR58y2lC8vww= | ||||||
| golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | ||||||
| golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||||
| golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||||
| @@ -150,6 +182,8 @@ golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= | |||||||
| golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= | golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= | ||||||
| golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= | golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= | ||||||
| golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= | golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= | ||||||
|  | golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= | ||||||
|  | golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= | ||||||
| golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= | golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= | ||||||
| golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | ||||||
| golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= | golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| package goext | package goext | ||||||
|  |  | ||||||
| const GoextVersion = "0.0.284" | const GoextVersion = "0.0.325" | ||||||
|  |  | ||||||
| const GoextVersionTimestamp = "2023-10-09T15:22:57+0200" | const GoextVersionTimestamp = "2023-11-27T14:14:58+0100" | ||||||
|   | |||||||
							
								
								
									
										178
									
								
								langext/baseAny.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										178
									
								
								langext/baseAny.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,178 @@ | |||||||
|  | package langext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"crypto/rand" | ||||||
|  | 	"errors" | ||||||
|  | 	"math" | ||||||
|  | 	"math/big" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type AnyBaseConverter struct { | ||||||
|  | 	base    uint64 | ||||||
|  | 	charset []rune | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewAnyBaseConverter(cs string) AnyBaseConverter { | ||||||
|  | 	rcs := []rune(cs) | ||||||
|  | 	return AnyBaseConverter{ | ||||||
|  | 		base:    uint64(len(rcs)), | ||||||
|  | 		charset: rcs, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (bc AnyBaseConverter) Rand(rlen int) string { | ||||||
|  | 	biBase := big.NewInt(int64(bc.base)) | ||||||
|  |  | ||||||
|  | 	randMax := big.NewInt(math.MaxInt64) | ||||||
|  |  | ||||||
|  | 	r := "" | ||||||
|  |  | ||||||
|  | 	for i := 0; i < rlen; i++ { | ||||||
|  | 		v, err := rand.Int(rand.Reader, randMax) | ||||||
|  | 		if err != nil { | ||||||
|  | 			panic(err) | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		r += string(bc.charset[v.Mod(v, biBase).Int64()]) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return r | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (bc AnyBaseConverter) EncodeUInt64(num uint64) string { | ||||||
|  | 	if num == 0 { | ||||||
|  | 		return "0" | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	b := "" | ||||||
|  |  | ||||||
|  | 	// loop as long the num is bigger than zero | ||||||
|  | 	for num > 0 { | ||||||
|  | 		r := num % bc.base | ||||||
|  |  | ||||||
|  | 		num -= r | ||||||
|  | 		num /= base62Base | ||||||
|  |  | ||||||
|  | 		b += string(bc.charset[int(r)]) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return b | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (bc AnyBaseConverter) DecodeUInt64(str string) (uint64, error) { | ||||||
|  | 	if str == "" { | ||||||
|  | 		return 0, errors.New("empty string") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	result := uint64(0) | ||||||
|  |  | ||||||
|  | 	for _, v := range str { | ||||||
|  | 		result *= base62Base | ||||||
|  |  | ||||||
|  | 		pos := ArrFirstIndex(bc.charset, v) | ||||||
|  | 		if pos == -1 { | ||||||
|  | 			return 0, errors.New("invalid character: " + string(v)) | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		result += uint64(pos) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return result, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (bc AnyBaseConverter) Encode(src []byte) string { | ||||||
|  | 	value := new(big.Int) | ||||||
|  | 	value.SetBytes(src) | ||||||
|  | 	return bc.EncodeBigInt(value) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (bc AnyBaseConverter) EncodeBigInt(src *big.Int) string { | ||||||
|  | 	value := new(big.Int) | ||||||
|  | 	value.Set(src) | ||||||
|  |  | ||||||
|  | 	isneg := value.Sign() < 0 | ||||||
|  |  | ||||||
|  | 	answer := "" | ||||||
|  |  | ||||||
|  | 	if isneg { | ||||||
|  | 		value.Neg(value) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	biBase := big.NewInt(int64(bc.base)) | ||||||
|  |  | ||||||
|  | 	rem := new(big.Int) | ||||||
|  |  | ||||||
|  | 	for value.Sign() > 0 { | ||||||
|  | 		value.QuoRem(value, biBase, rem) | ||||||
|  | 		answer = string(bc.charset[rem.Int64()]) + answer | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if isneg { | ||||||
|  | 		return "-" + answer | ||||||
|  | 	} else { | ||||||
|  | 		return answer | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (bc AnyBaseConverter) Decode(src string) ([]byte, error) { | ||||||
|  | 	value, err := bc.DecodeToBigInt(src) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, err | ||||||
|  | 	} | ||||||
|  | 	return value.Bytes(), nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (bc AnyBaseConverter) DecodeToBigInt(_src string) (*big.Int, error) { | ||||||
|  | 	result := new(big.Int) | ||||||
|  | 	result.SetInt64(0) | ||||||
|  |  | ||||||
|  | 	src := []rune(_src) | ||||||
|  |  | ||||||
|  | 	if len(src) == 0 { | ||||||
|  | 		return nil, errors.New("string is empty") | ||||||
|  | 	} | ||||||
|  | 	if bc.base < 2 { | ||||||
|  | 		return nil, errors.New("not enough digits") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	i := 0 | ||||||
|  |  | ||||||
|  | 	sign := new(big.Int) | ||||||
|  | 	sign.SetInt64(1) | ||||||
|  | 	if src[i] == '+' { | ||||||
|  | 		i++ | ||||||
|  | 	} else if src[i] == '-' { | ||||||
|  | 		i++ | ||||||
|  | 		sign.SetInt64(-1) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if i >= len(src) { | ||||||
|  | 		return nil, errors.New("no digits in input") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	biBase := big.NewInt(int64(bc.base)) | ||||||
|  |  | ||||||
|  | 	oldResult := new(big.Int) | ||||||
|  |  | ||||||
|  | 	for ; i < len(src); i++ { | ||||||
|  | 		n := ArrFirstIndex(bc.charset, src[i]) | ||||||
|  | 		if n < 0 { | ||||||
|  | 			return nil, errors.New("invalid characters in input") | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		oldResult.Set(result) | ||||||
|  |  | ||||||
|  | 		result.Mul(result, biBase) | ||||||
|  | 		result.Add(result, big.NewInt(int64(n))) | ||||||
|  |  | ||||||
|  | 		if result.Cmp(oldResult) < 0 { | ||||||
|  | 			return nil, errors.New("overflow") | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if sign.Cmp(big.NewInt(0)) < 0 { | ||||||
|  | 		result.Neg(result) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return result, nil | ||||||
|  | } | ||||||
							
								
								
									
										80
									
								
								langext/baseAny_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										80
									
								
								langext/baseAny_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,80 @@ | |||||||
|  | package langext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func _anyEncStr(bc AnyBaseConverter, v string) string { | ||||||
|  | 	vr := bc.Encode([]byte(v)) | ||||||
|  | 	return vr | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func _anyDecStr(bc AnyBaseConverter, v string) string { | ||||||
|  | 	vr, err := bc.Decode(v) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
|  | 	} | ||||||
|  | 	return string(vr) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestAnyBase58DefaultEncoding(t *testing.T) { | ||||||
|  | 	tst.AssertEqual(t, _anyEncStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "Hello"), "9Ajdvzr") | ||||||
|  | 	tst.AssertEqual(t, _anyEncStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in."), "48638SMcJuah5okqPx4kCVf5d8QAdgbdNf28g7ReY13prUENNbMyssjq5GjsrJHF5zeZfqs4uJMUJHr7VbrU4XBUZ2Fw9DVtqtn9N1eXucEWSEZahXV6w4ysGSWqGdpeYTJf1MdDzTg8vfcQViifJjZX") | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestAnyBase58DefaultDecoding(t *testing.T) { | ||||||
|  | 	tst.AssertEqual(t, _anyDecStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "9Ajdvzr"), "Hello") | ||||||
|  | 	tst.AssertEqual(t, _anyDecStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "48638SMcJuah5okqPx4kCVf5d8QAdgbdNf28g7ReY13prUENNbMyssjq5GjsrJHF5zeZfqs4uJMUJHr7VbrU4XBUZ2Fw9DVtqtn9N1eXucEWSEZahXV6w4ysGSWqGdpeYTJf1MdDzTg8vfcQViifJjZX"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in.") | ||||||
|  | } | ||||||
|  |  | ||||||
// TestAnyBaseDecode cross-checks AnyBaseConverter between many charsets:
// each case gives the same payload written in two bases; both sides are
// decoded to raw bytes, compared, and then re-encoded into the opposite
// charset to verify the round trip in both directions.
func TestAnyBaseDecode(t *testing.T) {

	// example charsets, from base2 up to a base256 emoji alphabet
	const (
		Binary  = "01"
		Decimal = "0123456789"
		Hex     = "0123456789ABCDEF"
		DNA     = "ACGT"
		Base32  = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567"
		Base58  = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
		Base62  = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
		Base64  = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
		Base256 = "🚀🪐☄🛰🌌🌑🌒🌓🌔🌕🌖🌗🌘🌍🌏🌎🐉☀💻🖥💾💿😂❤😍🤣😊🙏💕😭😘👍😅👏😁🔥🥰💔💖💙😢🤔😆🙄💪😉☺👌🤗💜😔😎😇🌹🤦🎉💞✌✨🤷😱😌🌸🙌😋💗💚😏💛🙂💓🤩😄😀🖤😃💯🙈👇🎶😒🤭❣😜💋👀😪😑💥🙋😞😩😡🤪👊🥳😥🤤👉💃😳✋😚😝😴🌟😬🙃🍀🌷😻😓⭐✅🥺🌈😈🤘💦✔😣🏃💐☹🎊💘😠☝😕🌺🎂🌻😐🖕💝🙊😹🗣💫💀👑🎵🤞😛🔴😤🌼😫⚽🤙☕🏆🤫👈😮🙆🍻🍃🐶💁😲🌿🧡🎁⚡🌞🎈❌✊👋😰🤨😶🤝🚶💰🍓💢🤟🙁🚨💨🤬✈🎀🍺🤓😙💟🌱😖👶🥴▶➡❓💎💸⬇😨🌚🦋😷🕺⚠🙅😟😵👎🤲🤠🤧📌🔵💅🧐🐾🍒😗🤑🌊🤯🐷☎💧😯💆👆🎤🙇🍑❄🌴💣🐸💌📍🥀🤢👅💡💩👐📸👻🤐🤮🎼🥵🚩🍎🍊👼💍📣🥂"
	)

	// TestDef: one payload expressed in two charsets (From* / To*)
	type TestDef struct {
		FromCS  string
		FromVal string
		ToCS    string
		ToVal   string
	}

	defs := []TestDef{
		{Binary, "10100101011100000101010", Decimal, "5421098"},
		{Decimal, "5421098", DNA, "CCAGGTGAAGGG"},
		{Decimal, "5421098", DNA, "CCAGGTGAAGGG"}, // NOTE(review): exact duplicate of the previous case — intentional?
		{Decimal, "80085", Base256, "🪐💞🔵"},
		{Hex, "48656C6C6C20576F526C5421", Base64, "SGVsbGwgV29SbFQh"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base32, "CIMVWGY3B7QFO32SNRPZBB"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base58, "2fUsGKQUcgQcwSqpvy6"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base62, "V34nvybdQ3m3RHk9Sr"},
	}

	for _, def := range defs {

		d1 := NewAnyBaseConverter(def.FromCS)
		d2 := NewAnyBaseConverter(def.ToCS)

		// decode both representations to raw bytes and compare
		v1 := tst.Must(d1.Decode(def.FromVal))(t)
		v2 := tst.Must(d2.Decode(def.ToVal))(t)

		tst.AssertArrayEqual(t, v1, v2)

		// re-encode each side in the opposite charset (round trip)
		str2 := d2.Encode(v1)
		tst.AssertEqual(t, str2, def.ToVal)

		str1 := d1.Encode(v2)
		tst.AssertEqual(t, str1, def.FromVal)

	}
}
| @@ -35,7 +35,7 @@ func IsNil(i interface{}) bool { | |||||||
| 		return true | 		return true | ||||||
| 	} | 	} | ||||||
| 	switch reflect.TypeOf(i).Kind() { | 	switch reflect.TypeOf(i).Kind() { | ||||||
| 	case reflect.Ptr, reflect.Map, reflect.Array, reflect.Chan, reflect.Slice: | 	case reflect.Ptr, reflect.Map, reflect.Chan, reflect.Slice, reflect.Func, reflect.UnsafePointer: | ||||||
| 		return reflect.ValueOf(i).IsNil() | 		return reflect.ValueOf(i).IsNil() | ||||||
| 	} | 	} | ||||||
| 	return false | 	return false | ||||||
|   | |||||||
| @@ -5,6 +5,7 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/primitive" | 	"go.mongodb.org/mongo-driver/bson/primitive" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | ||||||
| 	"reflect" | 	"reflect" | ||||||
| ) | ) | ||||||
| @@ -12,23 +13,35 @@ import ( | |||||||
| func CreateGoExtBsonRegistry() *bsoncodec.Registry { | func CreateGoExtBsonRegistry() *bsoncodec.Registry { | ||||||
| 	rb := bsoncodec.NewRegistryBuilder() | 	rb := bsoncodec.NewRegistryBuilder() | ||||||
|  |  | ||||||
| 	// otherwise we get []primitve.E when unmarshalling into any |  | ||||||
| 	// which will result in {'key': .., 'value': ...}[] json when json-marshalling |  | ||||||
| 	rb.RegisterTypeMapEntry(bsontype.EmbeddedDocument, reflect.TypeOf(primitive.M{})) |  | ||||||
|  |  | ||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.RFC3339Time{}), rfctime.RFC3339Time{}) | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.RFC3339Time{}), rfctime.RFC3339Time{}) | ||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.RFC3339Time{}), rfctime.RFC3339Time{}) | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.RFC3339Time{}), rfctime.RFC3339Time{}) | ||||||
|  |  | ||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{}) | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{}) | ||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{}) | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{}) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.UnixTime{}), rfctime.UnixTime{}) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.UnixTime{}), rfctime.UnixTime{}) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.UnixMilliTime{}), rfctime.UnixMilliTime{}) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.UnixMilliTime{}), rfctime.UnixMilliTime{}) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.UnixNanoTime{}), rfctime.UnixNanoTime{}) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.UnixNanoTime{}), rfctime.UnixNanoTime{}) | ||||||
|  |  | ||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.Date{}), rfctime.Date{}) | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.Date{}), rfctime.Date{}) | ||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.Date{}), rfctime.Date{}) | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.Date{}), rfctime.Date{}) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.SecondsF64(0)), rfctime.SecondsF64(0)) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(rfctime.SecondsF64(0))), rfctime.SecondsF64(0)) | ||||||
|  |  | ||||||
| 	bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb) | 	bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb) | ||||||
| 	bsoncodec.DefaultValueDecoders{}.RegisterDefaultDecoders(rb) | 	bsoncodec.DefaultValueDecoders{}.RegisterDefaultDecoders(rb) | ||||||
|  |  | ||||||
| 	bson.PrimitiveCodecs{}.RegisterPrimitiveCodecs(rb) | 	bson.PrimitiveCodecs{}.RegisterPrimitiveCodecs(rb) | ||||||
|  |  | ||||||
|  | 	// otherwise we get []primitve.E when unmarshalling into any | ||||||
|  | 	// which will result in {'key': .., 'value': ...}[] json when json-marshalling | ||||||
|  | 	rb.RegisterTypeMapEntry(bsontype.EmbeddedDocument, reflect.TypeOf(primitive.M{})) | ||||||
|  |  | ||||||
| 	return rb.Build() | 	return rb.Build() | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										28
									
								
								pagination/filter.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										28
									
								
								pagination/filter.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,28 @@ | |||||||
|  | package pagination | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type Filter interface { | ||||||
|  | 	FilterQuery() mongo.Pipeline | ||||||
|  | 	Sort() bson.D | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type dynamicFilter struct { | ||||||
|  | 	pipeline mongo.Pipeline | ||||||
|  | 	sort     bson.D | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (d dynamicFilter) FilterQuery() mongo.Pipeline { | ||||||
|  | 	return d.pipeline | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (d dynamicFilter) Sort() bson.D { | ||||||
|  | 	return d.sort | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func CreateFilter(pipeline mongo.Pipeline, sort bson.D) Filter { | ||||||
|  | 	return dynamicFilter{pipeline: pipeline, sort: sort} | ||||||
|  | } | ||||||
							
								
								
									
										16
									
								
								pagination/pagination.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										16
									
								
								pagination/pagination.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,16 @@ | |||||||
|  | package pagination | ||||||
|  |  | ||||||
// Pagination describes one page of a paginated listing response.
type Pagination struct {
	Page             int `json:"page"`            // page (first page == 1)
	Limit            int `json:"limit"`           // max-page-size
	TotalPages       int `json:"totalPages"`      // total page-count
	TotalItems       int `json:"totalItems"`      // total items-count
	CurrentPageCount int `json:"currntPageCount"` // item-count in current page ( == len(data) ) — NOTE(review): json tag "currntPageCount" looks like a typo for "currentPageCount"; fixing it would break existing API consumers, confirm before changing
}
|  |  | ||||||
// CalcPaginationTotalPages returns the number of pages needed to show
// totalItems items with limit items per page (ceiling division).
// It returns 0 when there are no items or when limit is not positive
// (the original panicked with a division by zero for limit == 0).
func CalcPaginationTotalPages(totalItems int, limit int) int {
	if totalItems <= 0 || limit <= 0 {
		return 0
	}
	return 1 + (totalItems-1)/limit
}
| @@ -2,7 +2,14 @@ package rfctime | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/timeext" | 	"gogs.mikescher.com/BlackForestBytes/goext/timeext" | ||||||
|  | 	"reflect" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -54,6 +61,63 @@ func (d SecondsF64) MarshalJSON() ([]byte, error) { | |||||||
| 	return json.Marshal(secs) | 	return json.Marshal(secs) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (d *SecondsF64) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*d = SecondsF64(0) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeDouble { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into SecondsF64", bt)) | ||||||
|  | 	} | ||||||
|  | 	var secValue float64 | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&secValue) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*d = SecondsF64(int64(secValue * float64(time.Second))) | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (d SecondsF64) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	return bson.MarshalValue(d.Seconds()) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (d SecondsF64) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { | ||||||
|  | 	if val.Kind() == reflect.Ptr && val.IsNil() { | ||||||
|  | 		if !val.CanSet() { | ||||||
|  | 			return errors.New("ValueUnmarshalerDecodeValue") | ||||||
|  | 		} | ||||||
|  | 		val.Set(reflect.New(val.Type().Elem())) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr && len(src) == 0 { | ||||||
|  | 		val.Set(reflect.Zero(val.Type())) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = d.UnmarshalBSONValue(tp, src) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr { | ||||||
|  | 		val.Set(reflect.ValueOf(&d)) | ||||||
|  | 	} else { | ||||||
|  | 		val.Set(reflect.ValueOf(d)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func NewSecondsF64(t time.Duration) SecondsF64 { | func NewSecondsF64(t time.Duration) SecondsF64 { | ||||||
| 	return SecondsF64(t) | 	return SecondsF64(t) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -2,6 +2,13 @@ package rfctime | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"reflect" | ||||||
| 	"strconv" | 	"strconv" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -59,6 +66,63 @@ func (t *UnixTime) UnmarshalText(data []byte) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (t *UnixTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*t = UnixTime{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeDateTime { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into UnixTime", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt time.Time | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*t = UnixTime(tt) | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t UnixTime) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	return bson.MarshalValue(time.Time(t)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t UnixTime) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { | ||||||
|  | 	if val.Kind() == reflect.Ptr && val.IsNil() { | ||||||
|  | 		if !val.CanSet() { | ||||||
|  | 			return errors.New("ValueUnmarshalerDecodeValue") | ||||||
|  | 		} | ||||||
|  | 		val.Set(reflect.New(val.Type().Elem())) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr && len(src) == 0 { | ||||||
|  | 		val.Set(reflect.Zero(val.Type())) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = t.UnmarshalBSONValue(tp, src) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr { | ||||||
|  | 		val.Set(reflect.ValueOf(&t)) | ||||||
|  | 	} else { | ||||||
|  | 		val.Set(reflect.ValueOf(t)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func (t UnixTime) Serialize() string { | func (t UnixTime) Serialize() string { | ||||||
| 	return strconv.FormatInt(t.Time().Unix(), 10) | 	return strconv.FormatInt(t.Time().Unix(), 10) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -2,6 +2,13 @@ package rfctime | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"reflect" | ||||||
| 	"strconv" | 	"strconv" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -59,6 +66,63 @@ func (t *UnixMilliTime) UnmarshalText(data []byte) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (t *UnixMilliTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*t = UnixMilliTime{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeDateTime { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into UnixMilliTime", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt time.Time | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*t = UnixMilliTime(tt) | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t UnixMilliTime) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	return bson.MarshalValue(time.Time(t)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t UnixMilliTime) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { | ||||||
|  | 	if val.Kind() == reflect.Ptr && val.IsNil() { | ||||||
|  | 		if !val.CanSet() { | ||||||
|  | 			return errors.New("ValueUnmarshalerDecodeValue") | ||||||
|  | 		} | ||||||
|  | 		val.Set(reflect.New(val.Type().Elem())) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr && len(src) == 0 { | ||||||
|  | 		val.Set(reflect.Zero(val.Type())) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = t.UnmarshalBSONValue(tp, src) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr { | ||||||
|  | 		val.Set(reflect.ValueOf(&t)) | ||||||
|  | 	} else { | ||||||
|  | 		val.Set(reflect.ValueOf(t)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func (t UnixMilliTime) Serialize() string { | func (t UnixMilliTime) Serialize() string { | ||||||
| 	return strconv.FormatInt(t.Time().UnixMilli(), 10) | 	return strconv.FormatInt(t.Time().UnixMilli(), 10) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -2,6 +2,13 @@ package rfctime | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"reflect" | ||||||
| 	"strconv" | 	"strconv" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -59,6 +66,63 @@ func (t *UnixNanoTime) UnmarshalText(data []byte) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (t *UnixNanoTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*t = UnixNanoTime{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeDateTime { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into UnixNanoTime", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt time.Time | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*t = UnixNanoTime(tt) | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t UnixNanoTime) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	return bson.MarshalValue(time.Time(t)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t UnixNanoTime) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { | ||||||
|  | 	if val.Kind() == reflect.Ptr && val.IsNil() { | ||||||
|  | 		if !val.CanSet() { | ||||||
|  | 			return errors.New("ValueUnmarshalerDecodeValue") | ||||||
|  | 		} | ||||||
|  | 		val.Set(reflect.New(val.Type().Elem())) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr && len(src) == 0 { | ||||||
|  | 		val.Set(reflect.Zero(val.Type())) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = t.UnmarshalBSONValue(tp, src) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr { | ||||||
|  | 		val.Set(reflect.ValueOf(&t)) | ||||||
|  | 	} else { | ||||||
|  | 		val.Set(reflect.ValueOf(t)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func (t UnixNanoTime) Serialize() string { | func (t UnixNanoTime) Serialize() string { | ||||||
| 	return strconv.FormatInt(t.Time().UnixNano(), 10) | 	return strconv.FormatInt(t.Time().UnixNano(), 10) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,33 +1,28 @@ | |||||||
| package syncext | package syncext | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"context" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" |  | ||||||
| 	"sync" | 	"sync" | ||||||
| 	"time" |  | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type AtomicBool struct { | type Atomic[T any] struct { | ||||||
| 	v        bool | 	v    T | ||||||
| 	listener map[string]chan bool | 	lock sync.RWMutex | ||||||
| 	lock     sync.Mutex |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func NewAtomicBool(value bool) *AtomicBool { | func NewAtomic[T any](value T) *Atomic[T] { | ||||||
| 	return &AtomicBool{ | 	return &Atomic[T]{ | ||||||
| 		v:    value, | 		v:    value, | ||||||
| 		listener: make(map[string]chan bool), | 		lock: sync.RWMutex{}, | ||||||
| 		lock:     sync.Mutex{}, |  | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
| func (a *AtomicBool) Get() bool { | func (a *Atomic[T]) Get() T { | ||||||
| 	a.lock.Lock() | 	a.lock.RLock() | ||||||
| 	defer a.lock.Unlock() | 	defer a.lock.RUnlock() | ||||||
| 	return a.v | 	return a.v | ||||||
| } | } | ||||||
|  |  | ||||||
| func (a *AtomicBool) Set(value bool) bool { | func (a *Atomic[T]) Set(value T) T { | ||||||
| 	a.lock.Lock() | 	a.lock.Lock() | ||||||
| 	defer a.lock.Unlock() | 	defer a.lock.Unlock() | ||||||
|  |  | ||||||
| @@ -35,79 +30,5 @@ func (a *AtomicBool) Set(value bool) bool { | |||||||
|  |  | ||||||
| 	a.v = value | 	a.v = value | ||||||
|  |  | ||||||
| 	for k, v := range a.listener { |  | ||||||
| 		select { |  | ||||||
| 		case v <- value: |  | ||||||
| 			// message sent |  | ||||||
| 		default: |  | ||||||
| 			// no receiver on channel |  | ||||||
| 			delete(a.listener, k) |  | ||||||
| 		} |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return oldValue | 	return oldValue | ||||||
| } | } | ||||||
|  |  | ||||||
| func (a *AtomicBool) Wait(waitFor bool) { |  | ||||||
| 	_ = a.WaitWithContext(context.Background(), waitFor) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (a *AtomicBool) WaitWithTimeout(timeout time.Duration, waitFor bool) error { |  | ||||||
| 	ctx, cancel := context.WithTimeout(context.Background(), timeout) |  | ||||||
| 	defer cancel() |  | ||||||
| 	return a.WaitWithContext(ctx, waitFor) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (a *AtomicBool) WaitWithContext(ctx context.Context, waitFor bool) error { |  | ||||||
| 	if err := ctx.Err(); err != nil { |  | ||||||
| 		return err |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	if a.Get() == waitFor { |  | ||||||
| 		return nil |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	uuid, _ := langext.NewHexUUID() |  | ||||||
|  |  | ||||||
| 	waitchan := make(chan bool) |  | ||||||
|  |  | ||||||
| 	a.lock.Lock() |  | ||||||
| 	a.listener[uuid] = waitchan |  | ||||||
| 	a.lock.Unlock() |  | ||||||
| 	defer func() { |  | ||||||
| 		a.lock.Lock() |  | ||||||
| 		delete(a.listener, uuid) |  | ||||||
| 		a.lock.Unlock() |  | ||||||
| 	}() |  | ||||||
|  |  | ||||||
| 	for { |  | ||||||
| 		if err := ctx.Err(); err != nil { |  | ||||||
| 			return err |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		timeOut := 1024 * time.Millisecond |  | ||||||
|  |  | ||||||
| 		if dl, ok := ctx.Deadline(); ok { |  | ||||||
| 			timeOutMax := dl.Sub(time.Now()) |  | ||||||
| 			if timeOutMax <= 0 { |  | ||||||
| 				timeOut = 0 |  | ||||||
| 			} else if 0 < timeOutMax && timeOutMax < timeOut { |  | ||||||
| 				timeOut = timeOutMax |  | ||||||
| 			} |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		if v, ok := ReadChannelWithTimeout(waitchan, timeOut); ok { |  | ||||||
| 			if v == waitFor { |  | ||||||
| 				return nil |  | ||||||
| 			} |  | ||||||
| 		} else { |  | ||||||
| 			if err := ctx.Err(); err != nil { |  | ||||||
| 				return err |  | ||||||
| 			} |  | ||||||
|  |  | ||||||
| 			if a.Get() == waitFor { |  | ||||||
| 				return nil |  | ||||||
| 			} |  | ||||||
| 		} |  | ||||||
| 	} |  | ||||||
| } |  | ||||||
|   | |||||||
							
								
								
									
										113
									
								
								syncext/bool.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										113
									
								
								syncext/bool.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,113 @@ | |||||||
|  | package syncext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"sync" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
// AtomicBool is a mutex-guarded boolean whose Set call notifies registered
// listeners, allowing the Wait* methods to block until a target value appears.
type AtomicBool struct {
	v        bool                 // current value; guarded by lock
	listener map[string]chan bool // per-waiter notification channels, keyed by random UUID; guarded by lock
	lock     sync.Mutex
}
|  |  | ||||||
|  | func NewAtomicBool(value bool) *AtomicBool { | ||||||
|  | 	return &AtomicBool{ | ||||||
|  | 		v:        value, | ||||||
|  | 		listener: make(map[string]chan bool), | ||||||
|  | 		lock:     sync.Mutex{}, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (a *AtomicBool) Get() bool { | ||||||
|  | 	a.lock.Lock() | ||||||
|  | 	defer a.lock.Unlock() | ||||||
|  | 	return a.v | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (a *AtomicBool) Set(value bool) bool { | ||||||
|  | 	a.lock.Lock() | ||||||
|  | 	defer a.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	oldValue := a.v | ||||||
|  |  | ||||||
|  | 	a.v = value | ||||||
|  |  | ||||||
|  | 	for k, v := range a.listener { | ||||||
|  | 		select { | ||||||
|  | 		case v <- value: | ||||||
|  | 			// message sent | ||||||
|  | 		default: | ||||||
|  | 			// no receiver on channel | ||||||
|  | 			delete(a.listener, k) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return oldValue | ||||||
|  | } | ||||||
|  |  | ||||||
// Wait blocks (potentially forever) until the value equals waitFor.
// The error is ignored because a background context never expires.
func (a *AtomicBool) Wait(waitFor bool) {
	_ = a.WaitWithContext(context.Background(), waitFor)
}
|  |  | ||||||
// WaitWithTimeout blocks until the value equals waitFor or the timeout
// elapses; on timeout the context's deadline-exceeded error is returned.
func (a *AtomicBool) WaitWithTimeout(timeout time.Duration, waitFor bool) error {
	ctx, cancel := context.WithTimeout(context.Background(), timeout)
	defer cancel()
	return a.WaitWithContext(ctx, waitFor)
}
|  |  | ||||||
|  | func (a *AtomicBool) WaitWithContext(ctx context.Context, waitFor bool) error { | ||||||
|  | 	if err := ctx.Err(); err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if a.Get() == waitFor { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	uuid, _ := langext.NewHexUUID() | ||||||
|  |  | ||||||
|  | 	waitchan := make(chan bool) | ||||||
|  |  | ||||||
|  | 	a.lock.Lock() | ||||||
|  | 	a.listener[uuid] = waitchan | ||||||
|  | 	a.lock.Unlock() | ||||||
|  | 	defer func() { | ||||||
|  | 		a.lock.Lock() | ||||||
|  | 		delete(a.listener, uuid) | ||||||
|  | 		a.lock.Unlock() | ||||||
|  | 	}() | ||||||
|  |  | ||||||
|  | 	for { | ||||||
|  | 		if err := ctx.Err(); err != nil { | ||||||
|  | 			return err | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		timeOut := 1024 * time.Millisecond | ||||||
|  |  | ||||||
|  | 		if dl, ok := ctx.Deadline(); ok { | ||||||
|  | 			timeOutMax := dl.Sub(time.Now()) | ||||||
|  | 			if timeOutMax <= 0 { | ||||||
|  | 				timeOut = 0 | ||||||
|  | 			} else if 0 < timeOutMax && timeOutMax < timeOut { | ||||||
|  | 				timeOut = timeOutMax | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if v, ok := ReadChannelWithTimeout(waitchan, timeOut); ok { | ||||||
|  | 			if v == waitFor { | ||||||
|  | 				return nil | ||||||
|  | 			} | ||||||
|  | 		} else { | ||||||
|  | 			if err := ctx.Err(); err != nil { | ||||||
|  | 				return err | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			if a.Get() == waitFor { | ||||||
|  | 				return nil | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | } | ||||||
| @@ -14,6 +14,20 @@ func AssertEqual[T comparable](t *testing.T, actual T, expected T) { | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
// AssertArrayEqual fails the test when the two slices differ in length or in
// any element (element-wise == comparison); it reports at most one mismatch.
func AssertArrayEqual[T comparable](t *testing.T, actual []T, expected []T) {
	t.Helper()

	if len(actual) != len(expected) {
		t.Errorf("values differ: Actual: '%v', Expected: '%v' (len %d <> %d)", actual, expected, len(actual), len(expected))
		return
	}

	for i, v := range actual {
		if v != expected[i] {
			t.Errorf("values differ: Actual: '%v', Expected: '%v' (at index %d)", actual, expected, i)
			return
		}
	}
}
|  |  | ||||||
| func AssertNotEqual[T comparable](t *testing.T, actual T, expected T) { | func AssertNotEqual[T comparable](t *testing.T, actual T, expected T) { | ||||||
| 	t.Helper() | 	t.Helper() | ||||||
| 	if actual == expected { | 	if actual == expected { | ||||||
|   | |||||||
							
								
								
									
										21
									
								
								tst/must.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										21
									
								
								tst/must.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,21 @@ | |||||||
|  | package tst | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"runtime/debug" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
// Must can be used to assert that a (T, error)-returning function produced no
// error, yielding only the value.
//
// Note: on error it calls t.Error (not t.Fatal), so the test continues and
// the (possibly zero) value v is still returned; the captured stack trace is
// included in the failure message.
//
// Usage:
//
//	input := "123"
//	value := tst.Must(strconv.Atoi(input))(t)
func Must[T any](v T, anerr error) func(t *testing.T) T {
	return func(t *testing.T) T {
		if anerr != nil {
			t.Error("Function returned an error: " + anerr.Error() + "\n" + string(debug.Stack()))
		}
		return v
	}
}
| @@ -44,6 +44,8 @@ type Coll[TData any] struct { | |||||||
| 	implDataTypeMap     map[reflect.Type]map[string]fullTypeRef                  // dynamic list of fields of TData implementations (only if TData is an interface) | 	implDataTypeMap     map[reflect.Type]map[string]fullTypeRef                  // dynamic list of fields of TData implementations (only if TData is an interface) | ||||||
| 	customDecoder       *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface) | 	customDecoder       *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface) | ||||||
| 	isInterfaceDataType bool                                                     // true if TData is an interface (not a struct) | 	isInterfaceDataType bool                                                     // true if TData is an interface (not a struct) | ||||||
|  | 	unmarshalHooks      []func(d TData) TData                                    // called for every object after unmarshalling | ||||||
|  | 	extraModPipeline    mongo.Pipeline                                           // appended to pipelines after filter/limit/skip/sort, used for $lookup, $set, $unset, $project, etc | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Collection() *mongo.Collection { | func (c *Coll[TData]) Collection() *mongo.Collection { | ||||||
| @@ -54,14 +56,6 @@ func (c *Coll[TData]) Name() string { | |||||||
| 	return c.coll.Name() | 	return c.coll.Name() | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) WithDecodeFunc(cdf func(ctx context.Context, dec Decodable) (TData, error), example TData) *Coll[TData] { |  | ||||||
|  |  | ||||||
| 	c.EnsureInitializedReflection(example) |  | ||||||
|  |  | ||||||
| 	c.customDecoder = langext.Ptr(cdf) |  | ||||||
| 	return c |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Indexes() mongo.IndexView { | func (c *Coll[TData]) Indexes() mongo.IndexView { | ||||||
| 	return c.coll.Indexes() | 	return c.coll.Indexes() | ||||||
| } | } | ||||||
| @@ -74,6 +68,26 @@ func (c *Coll[TData]) Drop(ctx context.Context) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
// WithDecodeFunc installs a custom decoding function, useful when TData is an
// interface type; `example` is used to initialize the reflection metadata for
// the concrete type. Returns the collection for chaining.
func (c *Coll[TData]) WithDecodeFunc(cdf func(ctx context.Context, dec Decodable) (TData, error), example TData) *Coll[TData] {

	c.EnsureInitializedReflection(example)

	c.customDecoder = langext.Ptr(cdf)
	return c
}
|  |  | ||||||
// WithUnmarshalHook registers a function that is applied to every entity
// after it was decoded/unmarshalled (hooks run in registration order).
// Returns the collection for chaining.
func (c *Coll[TData]) WithUnmarshalHook(fn func(d TData) TData) *Coll[TData] {
	c.unmarshalHooks = append(c.unmarshalHooks, fn)

	return c
}
|  |  | ||||||
// WithModifyingPipeline appends stages to the collection's extra pipeline,
// which is applied after filter/limit/skip/sort in queries and aggregations
// (intended for $lookup, $set, $unset, $project, etc).
// Returns the collection for chaining.
func (c *Coll[TData]) WithModifyingPipeline(p mongo.Pipeline) *Coll[TData] {
	c.extraModPipeline = append(c.extraModPipeline, p...)

	return c
}
|  |  | ||||||
| func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) { | func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) { | ||||||
|  |  | ||||||
| 	valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary) | 	valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary) | ||||||
|   | |||||||
| @@ -2,37 +2,39 @@ package wmo | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"context" | 	"context" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| ) | ) | ||||||
|  |  | ||||||
// decodeSingle decodes one entity from dec, either via the registered
// customDecoder or via plain bson decoding, then applies all registered
// unmarshalHooks (in registration order) to the result.
func (c *Coll[TData]) decodeSingle(ctx context.Context, dec Decodable) (TData, error) {
	var res TData
	var err error

	if c.customDecoder != nil {
		res, err = (*c.customDecoder)(ctx, dec)
		if err != nil {
			// NoLog: callers wrap and decide how to handle/log decode failures
			return *new(TData), exerr.Wrap(err, "failed to decode single entity with custom-decoder").Type("decoder", *c.customDecoder).NoLog().Build()
		}
	} else {
		err = dec.Decode(&res)
		if err != nil {
			return *new(TData), exerr.Wrap(err, "failed to decode single entity").Type("target-type", res).NoLog().Build()
		}
	}

	for _, hook := range c.unmarshalHooks {
		res = hook(res)
	}

	return res, nil
}
|  |  | ||||||
| func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData, error) { | func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData, error) { | ||||||
| 	if c.customDecoder != nil { |  | ||||||
|  |  | ||||||
| 	res := make([]TData, 0, cursor.RemainingBatchLength()) | 	res := make([]TData, 0, cursor.RemainingBatchLength()) | ||||||
|  |  | ||||||
|  | 	if c.customDecoder != nil { | ||||||
| 		for cursor.Next(ctx) { | 		for cursor.Next(ctx) { | ||||||
| 			entry, err := (*c.customDecoder)(ctx, cursor) | 			entry, err := (*c.customDecoder)(ctx, cursor) | ||||||
| 			if err != nil { | 			if err != nil { | ||||||
| @@ -40,20 +42,48 @@ func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData | |||||||
| 			} | 			} | ||||||
| 			res = append(res, entry) | 			res = append(res, entry) | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		return res, nil |  | ||||||
|  |  | ||||||
| 	} else { | 	} else { | ||||||
|  |  | ||||||
| 		res := make([]TData, 0, cursor.RemainingBatchLength()) |  | ||||||
|  |  | ||||||
| 		err := cursor.All(ctx, &res) | 		err := cursor.All(ctx, &res) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, exerr.Wrap(err, "failed to batch-decode entity").Type("target-type", res).Build() | 			return nil, exerr.Wrap(err, "failed to batch-decode entity").Type("target-type", res).Build() | ||||||
| 		} | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for i := 0; i < len(res); i++ { | ||||||
|  | 		for _, hook := range c.unmarshalHooks { | ||||||
|  | 			res[i] = hook(res[i]) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	return res, nil | 	return res, nil | ||||||
|  |  | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) decodeSingleOrRequery(ctx context.Context, dec Decodable) (TData, error) { | ||||||
|  | 	if c.extraModPipeline == nil { | ||||||
|  |  | ||||||
|  | 		// simple case, we can just decode the result and return it | ||||||
|  | 		return c.decodeSingle(ctx, dec) | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		// annyoing case, we have a extraModPipeline and need to re-query the document such that the extraModPipeline is applied... | ||||||
|  |  | ||||||
|  | 		type genDoc struct { | ||||||
|  | 			ID any `bson:"_id"` | ||||||
|  | 		} | ||||||
|  | 		var res genDoc | ||||||
|  | 		err := dec.Decode(&res) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return *new(TData), exerr.Wrap(err, "failed to ID-decode entity").NoLog().Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		v, err := c.findOneInternal(ctx, bson.M{"_id": res.ID}, false) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return *new(TData), exerr.Wrap(err, "failed to re-query entity").Any("_id", res.ID).NoLog().Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		return *v, nil | ||||||
|  |  | ||||||
|  | 	} | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,87 +0,0 @@ | |||||||
| package wmo |  | ||||||
|  |  | ||||||
| import ( |  | ||||||
| 	"go.mongodb.org/mongo-driver/bson" |  | ||||||
| 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" |  | ||||||
| ) |  | ||||||
|  |  | ||||||
// CreatePagination builds the aggregation stages ($match + $sort + optional
// $limit) that page through `coll` using the given cursor-token.
// Pagination is keyed on fieldPrimary (sorted by sortPrimary) and optionally
// tie-broken by fieldSecondary/sortSecondary; when pageSize is set the result
// is limited to pageSize+1 entries (the extra one signals that a further page
// exists).
func CreatePagination[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) {

	cond := bson.A{}
	sort := bson.D{}

	valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary)
	if err != nil {
		return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build()
	}

	if sortPrimary == ct.SortASC {
		// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary
		cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}})
		sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
	} else if sortPrimary == ct.SortDESC {
		// We sort DESC on <field> - so we want all entries older ($lt) than the $primary
		cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}})
		sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
	}

	if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {

		valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary)
		if err != nil {
			return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build()
		}

		if *sortSecondary == ct.SortASC {

			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer)
			cond = append(cond, bson.M{"$and": bson.A{
				bson.M{fieldPrimary: valuePrimary},
				bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}},
			}})

			sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})

		} else if *sortSecondary == ct.SortDESC {

			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older)
			cond = append(cond, bson.M{"$and": bson.A{
				bson.M{fieldPrimary: valuePrimary},
				bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}},
			}})

			sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})

		}
	}

	pipeline := make([]bson.D, 0, 3)

	if token.Mode == ct.CTMStart {

		// no gt/lt condition - start token returns the first page

	} else if token.Mode == ct.CTMNormal {

		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}})

	} else if token.Mode == ct.CTMEnd {

		// end token: append an always-false condition so nothing matches
		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}})

	} else {

		return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build()

	}

	pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}})

	if pageSize != nil {
		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}})
	}

	return pipeline, nil
}
| @@ -5,9 +5,13 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| 	"go.mongodb.org/mongo-driver/mongo/options" | 	"go.mongodb.org/mongo-driver/mongo/options" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) { | func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) { | ||||||
|  |  | ||||||
|  | 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | ||||||
| @@ -22,6 +26,9 @@ func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, op | |||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) { | func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) { | ||||||
|  |  | ||||||
|  | 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | ||||||
| @@ -39,6 +46,9 @@ func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeli | |||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) { | func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) { | ||||||
|  |  | ||||||
|  | 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return *new(TData), exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | 		return *new(TData), exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | ||||||
|   | |||||||
							
								
								
									
										160
									
								
								wmo/queryFind.go
									
									
									
									
									
								
							
							
						
						
									
										160
									
								
								wmo/queryFind.go
									
									
									
									
									
								
							| @@ -2,69 +2,56 @@ package wmo | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"context" | 	"context" | ||||||
| 	"errors" |  | ||||||
| 	"go.mongodb.org/mongo-driver/bson" | 	"go.mongodb.org/mongo-driver/bson" | ||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| 	"go.mongodb.org/mongo-driver/mongo/options" | 	"go.mongodb.org/mongo-driver/mongo/options" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) { |  | ||||||
| 	mongoRes := c.coll.FindOne(ctx, filter) |  | ||||||
| 	if err := mongoRes.Err(); err != nil { |  | ||||||
| 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed"). |  | ||||||
| 			Str("collection", c.Name()). |  | ||||||
| 			Any("filter", filter). |  | ||||||
| 			Build() |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) { |  | ||||||
| 	mongoRes := c.coll.FindOne(ctx, filter) |  | ||||||
|  |  | ||||||
| 	res, err := c.decodeSingle(ctx, mongoRes) |  | ||||||
| 	if errors.Is(err, mongo.ErrNoDocuments) { |  | ||||||
| 		return nil, nil |  | ||||||
| 	} |  | ||||||
| 	if err != nil { |  | ||||||
| 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Any("filter", filter).Str("collection", c.Name()).Build() |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return &res, nil |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) { |  | ||||||
| 	mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id}) |  | ||||||
| 	if err := mongoRes.Err(); err != nil { |  | ||||||
| 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one-by-id] failed"). |  | ||||||
| 			Str("collection", c.Name()). |  | ||||||
| 			Id("id", id). |  | ||||||
| 			Build() |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) { |  | ||||||
| 	mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id}) |  | ||||||
|  |  | ||||||
| 	res, err := c.decodeSingle(ctx, mongoRes) |  | ||||||
| 	if errors.Is(err, mongo.ErrNoDocuments) { |  | ||||||
| 		return nil, nil |  | ||||||
| 	} |  | ||||||
| 	if err != nil { |  | ||||||
| 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt-by-id] failed").Id("id", id).Str("collection", c.Name()).Build() |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return &res, nil |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) { | func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) { | ||||||
| 	cursor, err := c.coll.Find(ctx, filter, opts...) |  | ||||||
|  | 	pipeline := mongo.Pipeline{} | ||||||
|  | 	pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}}) | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		if opt != nil && opt.Sort != nil { | ||||||
|  | 			pipeline = append(pipeline, bson.D{{Key: "$sort", Value: opt.Sort}}) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		if opt != nil && opt.Skip != nil { | ||||||
|  | 			pipeline = append(pipeline, bson.D{{Key: "$skip", Value: *opt.Skip}}) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		if opt != nil && opt.Limit != nil { | ||||||
|  | 			pipeline = append(pipeline, bson.D{{Key: "$limit", Value: *opt.Limit}}) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		if opt != nil && opt.Projection != nil { | ||||||
|  | 			pipeline = append(pipeline, bson.D{{Key: "$project", Value: opt.Projection}}) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	convOpts := make([]*options.AggregateOptions, 0, len(opts)) | ||||||
|  | 	for _, v := range opts { | ||||||
|  | 		vConv, err := convertFindOpt(v) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Any("filter", filter).Any("opts", opts).Str("collection", c.Name()).Build() | 			return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||||
|  | 		} | ||||||
|  | 		convOpts = append(convOpts, vConv) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	cursor, err := c.coll.Aggregate(ctx, pipeline, convOpts...) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	res, err := c.decodeAll(ctx, cursor) | 	res, err := c.decodeAll(ctx, cursor) | ||||||
| @@ -74,3 +61,66 @@ func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options. | |||||||
|  |  | ||||||
| 	return res, nil | 	return res, nil | ||||||
| } | } | ||||||
|  |  | ||||||
// convertFindOpt converts options.FindOptions into options.AggregateOptions.
// Options with no aggregation equivalent (AllowPartialResults, CursorType,
// Max, Min, NoCursorTimeout, OplogReplay, ReturnKey, ShowRecordID, Snapshot)
// yield a TypeMongoInvalidOpt error instead of being silently dropped.
// A nil input yields (nil, nil), which is valid to pass on to Aggregate.
func convertFindOpt(v *options.FindOptions) (*options.AggregateOptions, error) {
	if v == nil {
		return nil, nil
	}

	r := options.Aggregate()

	if v.AllowDiskUse != nil {
		r.SetAllowDiskUse(*v.AllowDiskUse)
	}
	if v.AllowPartialResults != nil {
		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'AllowPartialResults' (cannot convert to AggregateOptions)").Build()
	}
	if v.BatchSize != nil {
		r.SetBatchSize(*v.BatchSize)
	}
	if v.Collation != nil {
		r.SetCollation(v.Collation)
	}
	if v.Comment != nil {
		r.SetComment(*v.Comment)
	}
	if v.CursorType != nil {
		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'CursorType' (cannot convert to AggregateOptions)").Build()
	}
	if v.Hint != nil {
		r.SetHint(v.Hint)
	}
	if v.Max != nil {
		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'Max' (cannot convert to AggregateOptions)").Build()
	}
	if v.MaxAwaitTime != nil {
		r.SetMaxAwaitTime(*v.MaxAwaitTime)
	}
	if v.MaxTime != nil {
		r.SetMaxTime(*v.MaxTime)
	}
	if v.Min != nil {
		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'Min' (cannot convert to AggregateOptions)").Build()
	}
	if v.NoCursorTimeout != nil {
		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'NoCursorTimeout' (cannot convert to AggregateOptions)").Build()
	}
	if v.OplogReplay != nil {
		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'OplogReplay' (cannot convert to AggregateOptions)").Build()
	}
	if v.ReturnKey != nil {
		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'ReturnKey' (cannot convert to AggregateOptions)").Build()
	}
	if v.ShowRecordID != nil {
		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'ShowRecordID' (cannot convert to AggregateOptions)").Build()
	}
	if v.Snapshot != nil {
		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'Snapshot' (cannot convert to AggregateOptions)").Build()
	}
	if v.Let != nil {
		r.SetLet(v.Let)
	}

	return r, nil
}
|   | |||||||
							
								
								
									
										93
									
								
								wmo/queryFindOne.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										93
									
								
								wmo/queryFindOne.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,93 @@ | |||||||
|  | package wmo | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"errors" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) { | ||||||
|  | 	r, err := c.findOneInternal(ctx, filter, false) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return *r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) { | ||||||
|  | 	r, err := c.findOneInternal(ctx, filter, true) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) { | ||||||
|  | 	r, err := c.findOneInternal(ctx, bson.M{"_id": id}, false) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one-by-id] failed").Id("id", id).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return *r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) { | ||||||
|  | 	r, err := c.findOneInternal(ctx, bson.M{"_id": id}, true) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt-by-id] failed").Id("id", id).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowNull bool) (*TData, error) { | ||||||
|  |  | ||||||
|  | 	if len(c.extraModPipeline) == 0 { | ||||||
|  |  | ||||||
|  | 		// simple case, use mongo FindOne | ||||||
|  |  | ||||||
|  | 		mongoRes := c.coll.FindOne(ctx, filter) | ||||||
|  |  | ||||||
|  | 		res, err := c.decodeSingle(ctx, mongoRes) | ||||||
|  | 		if allowNull && errors.Is(err, mongo.ErrNoDocuments) { | ||||||
|  | 			return nil, nil | ||||||
|  | 		} | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "mongo-query[find-one] failed").Any("filter", filter).Str("collection", c.Name()).NoLog().Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		return &res, nil | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		// complex case, we one ore more additional pipeline stages, convert to aggregation | ||||||
|  |  | ||||||
|  | 		pipeline := mongo.Pipeline{} | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}}) | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: 1}}) | ||||||
|  |  | ||||||
|  | 		pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||||
|  |  | ||||||
|  | 		cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).NoLog().Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if cursor.Next(ctx) { | ||||||
|  | 			v, err := c.decodeSingle(ctx, cursor) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed to decode results").Any("pipeline", pipeline).Str("collection", c.Name()).NoLog().Build() | ||||||
|  | 			} | ||||||
|  | 			return &v, nil | ||||||
|  | 		} else if allowNull { | ||||||
|  | 			return nil, nil | ||||||
|  | 		} else { | ||||||
|  | 			return nil, exerr.Wrap(mongo.ErrNoDocuments, "mongo-aggregation [find-one] returned no documents").Any("pipeline", pipeline).Str("collection", c.Name()).NoLog().Build() | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | } | ||||||
| @@ -14,9 +14,25 @@ func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, erro | |||||||
| 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	mongoRes := c.coll.FindOne(ctx, bson.M{"_id": insRes.InsertedID}) | 	r, err := c.findOneInternal(ctx, bson.M{"_id": insRes.InsertedID}, false) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  | 	return *r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) | // InsertOneUnchecked behaves the same as InsertOne, but allows arbitrary data to be inserted (valueIn is any instead of TData) | ||||||
|  | func (c *Coll[TData]) InsertOneUnchecked(ctx context.Context, valueIn any) (TData, error) { | ||||||
|  | 	insRes, err := c.coll.InsertOne(ctx, valueIn) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	r, err := c.findOneInternal(ctx, bson.M{"_id": insRes.InsertedID}, false) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  | 	return *r, nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) { | func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) { | ||||||
| @@ -27,3 +43,13 @@ func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.I | |||||||
|  |  | ||||||
| 	return insRes, nil | 	return insRes, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // InsertManyUnchecked behaves the same as InsertOne, but allows arbitrary data to be inserted (valueIn is []any instead of []TData) | ||||||
|  | func (c *Coll[TData]) InsertManyUnchecked(ctx context.Context, valueIn []any) (*mongo.InsertManyResult, error) { | ||||||
|  | 	insRes, err := c.coll.InsertMany(ctx, langext.ArrayToInterface(valueIn)) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "mongo-query[insert-many] failed").Int("len(valueIn)", len(valueIn)).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return insRes, nil | ||||||
|  | } | ||||||
|   | |||||||
| @@ -34,7 +34,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
| 		sortDirSecondary = nil | 		sortDirSecondary = nil | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	paginationPipeline, err := CreatePagination(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | 	paginationPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, ct.CursorToken{}, exerr. | 		return nil, ct.CursorToken{}, exerr. | ||||||
| 			Wrap(err, "failed to create pagination"). | 			Wrap(err, "failed to create pagination"). | ||||||
| @@ -50,6 +50,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	pipeline = append(pipeline, paginationPipeline...) | 	pipeline = append(pipeline, paginationPipeline...) | ||||||
|  | 	pipeline = append(pipeline, c.extraModPipeline...) | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline) | 	cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -91,11 +92,11 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
| 	return entities, nextToken, nil | 	return entities, nextToken, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, error) { | ||||||
| 	type countRes struct { | 	type countRes struct { | ||||||
| 		Count int64 `bson:"c"` | 		Count int64 `bson:"c"` | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Count(ctx context.Context, filter ct.Filter) (int64, error) { |  | ||||||
| 	pipeline := filter.FilterQuery() | 	pipeline := filter.FilterQuery() | ||||||
|  |  | ||||||
| 	pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}}) | 	pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}}) | ||||||
| @@ -130,3 +131,83 @@ func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageS | |||||||
| 	} | 	} | ||||||
| 	return data, token, count, nil | 	return data, token, count, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) { | ||||||
|  |  | ||||||
|  | 	cond := bson.A{} | ||||||
|  | 	sort := bson.D{} | ||||||
|  |  | ||||||
|  | 	valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if sortPrimary == ct.SortASC { | ||||||
|  | 		// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary | ||||||
|  | 		cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}}) | ||||||
|  | 		sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) | ||||||
|  | 	} else if sortPrimary == ct.SortDESC { | ||||||
|  | 		// We sort DESC on <field> - so we want all entries older ($lt) than the $primary | ||||||
|  | 		cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}}) | ||||||
|  | 		sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary { | ||||||
|  |  | ||||||
|  | 		valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if *sortSecondary == ct.SortASC { | ||||||
|  |  | ||||||
|  | 			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer) | ||||||
|  | 			cond = append(cond, bson.M{"$and": bson.A{ | ||||||
|  | 				bson.M{fieldPrimary: valuePrimary}, | ||||||
|  | 				bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}}, | ||||||
|  | 			}}) | ||||||
|  |  | ||||||
|  | 			sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) | ||||||
|  |  | ||||||
|  | 		} else if *sortSecondary == ct.SortDESC { | ||||||
|  |  | ||||||
|  | 			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older) | ||||||
|  | 			cond = append(cond, bson.M{"$and": bson.A{ | ||||||
|  | 				bson.M{fieldPrimary: valuePrimary}, | ||||||
|  | 				bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}}, | ||||||
|  | 			}}) | ||||||
|  |  | ||||||
|  | 			sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) | ||||||
|  |  | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipeline := make([]bson.D, 0, 3) | ||||||
|  |  | ||||||
|  | 	if token.Mode == ct.CTMStart { | ||||||
|  |  | ||||||
|  | 		// no gt/lt condition | ||||||
|  |  | ||||||
|  | 	} else if token.Mode == ct.CTMNormal { | ||||||
|  |  | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}}) | ||||||
|  |  | ||||||
|  | 	} else if token.Mode == ct.CTMEnd { | ||||||
|  |  | ||||||
|  | 		// false | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}}) | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build() | ||||||
|  |  | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}}) | ||||||
|  |  | ||||||
|  | 	if pageSize != nil { | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}}) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return pipeline, nil | ||||||
|  | } | ||||||
|   | |||||||
							
								
								
									
										82
									
								
								wmo/queryPaginate.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										82
									
								
								wmo/queryPaginate.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,82 @@ | |||||||
|  | package wmo | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.Filter, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||||
|  | 	type totalCountResult struct { | ||||||
|  | 		Count int `bson:"count"` | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if page < 0 { | ||||||
|  | 		page = 1 | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelineSort := mongo.Pipeline{} | ||||||
|  | 	pipelineFilter := mongo.Pipeline{} | ||||||
|  | 	sort := bson.D{} | ||||||
|  |  | ||||||
|  | 	if filter != nil { | ||||||
|  | 		pipelineFilter = filter.FilterQuery() | ||||||
|  | 		sort = filter.Sort() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if len(sort) != 0 { | ||||||
|  | 		pipelineSort = append(pipelineSort, bson.D{{Key: "$sort", Value: sort}}) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelinePaginate := mongo.Pipeline{} | ||||||
|  | 	if limit != nil { | ||||||
|  | 		pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$skip", Value: *limit * (page - 1)}}) | ||||||
|  | 		pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$limit", Value: *limit}}) | ||||||
|  | 	} else { | ||||||
|  | 		page = 1 | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelineCount := mongo.Pipeline{} | ||||||
|  | 	pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}}) | ||||||
|  |  | ||||||
|  | 	pipelineList := langext.ArrConcat(pipelineFilter, pipelineSort, pipelinePaginate, c.extraModPipeline, pipelineSort) | ||||||
|  | 	pipelineTotalCount := langext.ArrConcat(pipelineFilter, pipelineCount) | ||||||
|  |  | ||||||
|  | 	cursorList, err := c.coll.Aggregate(ctx, pipelineList) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	entities, err := c.decodeAll(ctx, cursorList) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, pag.Pagination{}, exerr.Wrap(err, "failed to all-decode entities").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	cursorTotalCount, err := c.coll.Aggregate(ctx, pipelineTotalCount) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	var tcRes totalCountResult | ||||||
|  | 	if cursorTotalCount.Next(ctx) { | ||||||
|  | 		err = cursorTotalCount.Decode(&tcRes) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode mongo-aggregation $count result").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build() | ||||||
|  | 		} | ||||||
|  | 	} else { | ||||||
|  | 		tcRes.Count = 0 // no entries in DB | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	paginationObj := pag.Pagination{ | ||||||
|  | 		Page:             page, | ||||||
|  | 		Limit:            langext.Coalesce(limit, tcRes.Count), | ||||||
|  | 		TotalPages:       pag.CalcPaginationTotalPages(tcRes.Count, langext.Coalesce(limit, tcRes.Count)), | ||||||
|  | 		TotalItems:       tcRes.Count, | ||||||
|  | 		CurrentPageCount: len(entities), | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return entities, paginationObj, nil | ||||||
|  | } | ||||||
| @@ -18,7 +18,7 @@ func (c *Coll[TData]) FindOneAndUpdate(ctx context.Context, filterQuery bson.M, | |||||||
| 			Build() | 			Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) | 	return c.decodeSingleOrRequery(ctx, mongoRes) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQuery bson.M) error { | func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQuery bson.M) error { | ||||||
| @@ -81,5 +81,5 @@ func (c *Coll[TData]) FindOneAndReplace(ctx context.Context, filterQuery bson.M, | |||||||
| 			Build() | 			Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) | 	return c.decodeSingleOrRequery(ctx, mongoRes) | ||||||
| } | } | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user