Compare commits
	
		
			14 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 398ed56d32 | |||
| f3ecba3883 | |||
| 45031b05cf | |||
| 7413ea045d | |||
| 62c9a4e734 | |||
| 3a8baaa6d9 | |||
| 498785e213 | |||
| 678f95642c | |||
| dacc97e2ce | |||
| f8c0c0afa0 | |||
| 2fbd5cf965 | |||
| 75f71fe3db | |||
| ab1a1ab6f6 | |||
| 19ee5019ef | 
							
								
								
									
										6
									
								
								.idea/golinter.xml
									
									
									
										generated
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								.idea/golinter.xml
									
									
									
										generated
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | |||||||
|  | <?xml version="1.0" encoding="UTF-8"?> | ||||||
|  | <project version="4"> | ||||||
|  |   <component name="GoLinterSettings"> | ||||||
|  |     <option name="checkGoLinterExe" value="false" /> | ||||||
|  |   </component> | ||||||
|  | </project> | ||||||
| @@ -23,6 +23,7 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"` | |||||||
| |             |            |                                                                                                               | | |             |            |                                                                                                               | | ||||||
| | mongoext    | Mike       | Utility/Helper functions for mongodb                                                                          | | | mongoext    | Mike       | Utility/Helper functions for mongodb                                                                          | | ||||||
| | cursortoken | Mike       | MongoDB cursortoken implementation                                                                            | | | cursortoken | Mike       | MongoDB cursortoken implementation                                                                            | | ||||||
|  | | pagination  | Mike       | Pagination implementation                                                                                     | | ||||||
| |             |            |                                                                                                               | | |             |            |                                                                                                               | | ||||||
| | totpext     | Mike       | Implementation of TOTP (2-Factor-Auth)                                                                        | | | totpext     | Mike       | Implementation of TOTP (2-Factor-Auth)                                                                        | | ||||||
| | termext     | Mike       | Utilities for terminals (mostly color output)                                                                 | | | termext     | Mike       | Utilities for terminals (mostly color output)                                                                 | | ||||||
|   | |||||||
										
											Binary file not shown.
										
									
								
							| @@ -1,10 +1,12 @@ | |||||||
| package bfcodegen | package bfcodegen | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	_ "embed" | ||||||
| 	"errors" | 	"errors" | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  | 	"go/format" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext" | 	"gogs.mikescher.com/BlackForestBytes/goext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
| @@ -14,7 +16,7 @@ import ( | |||||||
| 	"path/filepath" | 	"path/filepath" | ||||||
| 	"regexp" | 	"regexp" | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"time" | 	"text/template" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type CSIDDef struct { | type CSIDDef struct { | ||||||
| @@ -30,6 +32,9 @@ var rexCSIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s | |||||||
|  |  | ||||||
| var rexCSIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumCharsetIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | var rexCSIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumCharsetIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | ||||||
|  |  | ||||||
|  | //go:embed csid-generate.template | ||||||
|  | var templateCSIDGenerateText string | ||||||
|  |  | ||||||
| func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | ||||||
|  |  | ||||||
| 	files, err := os.ReadDir(sourceDir) | 	files, err := os.ReadDir(sourceDir) | ||||||
| @@ -95,25 +100,16 @@ func GenerateCharsetIDSpecs(sourceDir string, destFile string) error { | |||||||
| 		return errors.New("no package name found in any file") | 		return errors.New("no package name found in any file") | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	err = os.WriteFile(destFile, []byte(fmtCSIDOutput(newChecksum, allIDs, pkgname)), 0o755) | 	fdata, err := format.Source([]byte(fmtCSIDOutput(newChecksum, allIDs, pkgname))) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second)) | 	err = os.WriteFile(destFile, fdata, 0o755) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if res.CommandTimedOut { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt timed out") |  | ||||||
| 	} |  | ||||||
| 	if res.ExitCode != 0 { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt did not succeed") |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -168,198 +164,19 @@ func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) { | |||||||
| } | } | ||||||
|  |  | ||||||
| func fmtCSIDOutput(cs string, ids []CSIDDef, pkgname string) string { | func fmtCSIDOutput(cs string, ids []CSIDDef, pkgname string) string { | ||||||
| 	str := "// Code generated by id-generate.go DO NOT EDIT.\n" | 	templ := template.Must(template.New("csid-generate").Parse(templateCSIDGenerateText)) | ||||||
| 	str += "\n" |  | ||||||
| 	str += "package " + pkgname + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	str += `import "crypto/rand"` + "\n" | 	buffer := bytes.Buffer{} | ||||||
| 	str += `import "fmt"` + "\n" |  | ||||||
| 	str += `import "github.com/go-playground/validator/v10"` + "\n" |  | ||||||
| 	str += `import "github.com/rs/zerolog/log"` + "\n" |  | ||||||
| 	str += `import "gogs.mikescher.com/BlackForestBytes/goext/exerr"` + "\n" |  | ||||||
| 	str += `import "gogs.mikescher.com/BlackForestBytes/goext/langext"` + "\n" |  | ||||||
| 	str += `import "gogs.mikescher.com/BlackForestBytes/goext/rext"` + "\n" |  | ||||||
| 	str += `import "math/big"` + "\n" |  | ||||||
| 	str += `import "reflect"` + "\n" |  | ||||||
| 	str += `import "regexp"` + "\n" |  | ||||||
| 	str += `import "strings"` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	str += "const ChecksumCharsetIDGenerator = \"" + cs + "\" // GoExtVersion: " + goext.GoextVersion + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	str += "const idlen = 24\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += "const checklen = 1\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `const idCharset = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"` + "\n" |  | ||||||
| 	str += "const idCharsetLen = len(idCharset)\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += "var charSetReverseMap = generateCharsetMap()\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += "const (\n" |  | ||||||
| 	for _, iddef := range ids { |  | ||||||
| 		str += "	prefix" + iddef.Name + " = \"" + iddef.Prefix + "\"" + "\n" |  | ||||||
| 	} |  | ||||||
| 	str += ")\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += "var (\n" |  | ||||||
| 	for _, iddef := range ids { |  | ||||||
| 		str += "	regex" + iddef.Name + " = generateRegex(prefix" + iddef.Name + ")" + "\n" |  | ||||||
| 	} |  | ||||||
| 	str += ")\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += "func generateRegex(prefix string) rext.Regex {\n" |  | ||||||
| 	str += "	return rext.W(regexp.MustCompile(fmt.Sprintf(\"^%s[%s]{%d}[%s]{%d}$\", prefix, idCharset, idlen-len(prefix)-checklen, idCharset, checklen)))\n" |  | ||||||
| 	str += "}\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	str += `func generateCharsetMap() []int {` + "\n" |  | ||||||
| 	str += `	result := make([]int, 128)` + "\n" |  | ||||||
| 	str += `	for i := 0; i < len(result); i++ {` + "\n" |  | ||||||
| 	str += `		result[i] = -1` + "\n" |  | ||||||
| 	str += `	}` + "\n" |  | ||||||
| 	str += `	for idx, chr := range idCharset {` + "\n" |  | ||||||
| 	str += `		result[int(chr)] = idx` + "\n" |  | ||||||
| 	str += `	}` + "\n" |  | ||||||
| 	str += `	return result` + "\n" |  | ||||||
| 	str += `}` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `func generateID(prefix string) string {` + "\n" |  | ||||||
| 	str += `	k := ""` + "\n" |  | ||||||
| 	str += `	max := big.NewInt(int64(idCharsetLen))` + "\n" |  | ||||||
| 	str += `	checksum := 0` + "\n" |  | ||||||
| 	str += `	for i := 0; i < idlen-len(prefix)-checklen; i++ {` + "\n" |  | ||||||
| 	str += `		v, err := rand.Int(rand.Reader, max)` + "\n" |  | ||||||
| 	str += `		if err != nil {` + "\n" |  | ||||||
| 	str += `			panic(err)` + "\n" |  | ||||||
| 	str += `		}` + "\n" |  | ||||||
| 	str += `		v64 := v.Int64()` + "\n" |  | ||||||
| 	str += `		k += string(idCharset[v64])` + "\n" |  | ||||||
| 	str += `		checksum = (checksum + int(v64)) % (idCharsetLen)` + "\n" |  | ||||||
| 	str += `	}` + "\n" |  | ||||||
| 	str += `	checkstr := string(idCharset[checksum%idCharsetLen])` + "\n" |  | ||||||
| 	str += `	return prefix + k + checkstr` + "\n" |  | ||||||
| 	str += `}` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `func validateID(prefix string, value string) error {` + "\n" |  | ||||||
| 	str += `	if len(value) != idlen {` + "\n" |  | ||||||
| 	str += `		return exerr.New(exerr.TypeInvalidCSID, "id has the wrong length").Str("value", value).Build()` + "\n" |  | ||||||
| 	str += `	}` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `	if !strings.HasPrefix(value, prefix) {` + "\n" |  | ||||||
| 	str += `		return exerr.New(exerr.TypeInvalidCSID, "id is missing the correct prefix").Str("value", value).Str("prefix", prefix).Build()` + "\n" |  | ||||||
| 	str += `	}` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `	checksum := 0` + "\n" |  | ||||||
| 	str += `	for i := len(prefix); i < len(value)-checklen; i++ {` + "\n" |  | ||||||
| 	str += `		ichr := int(value[i])` + "\n" |  | ||||||
| 	str += `		if ichr < 0 || ichr >= len(charSetReverseMap) || charSetReverseMap[ichr] == -1 {` + "\n" |  | ||||||
| 	str += `			return exerr.New(exerr.TypeInvalidCSID, "id contains invalid characters").Str("value", value).Build()` + "\n" |  | ||||||
| 	str += `		}` + "\n" |  | ||||||
| 	str += `		checksum = (checksum + charSetReverseMap[ichr]) % (idCharsetLen)` + "\n" |  | ||||||
| 	str += `	}` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `	checkstr := string(idCharset[checksum%idCharsetLen])` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `	if !strings.HasSuffix(value, checkstr) {` + "\n" |  | ||||||
| 	str += `		return exerr.New(exerr.TypeInvalidCSID, "id checkstring is invalid").Str("value", value).Str("checkstr", checkstr).Build()` + "\n" |  | ||||||
| 	str += `	}` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `	return nil` + "\n" |  | ||||||
| 	str += `}` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `func getRawData(prefix string, value string) string {` + "\n" |  | ||||||
| 	str += `	if len(value) != idlen {` + "\n" |  | ||||||
| 	str += `		return ""` + "\n" |  | ||||||
| 	str += `	}` + "\n" |  | ||||||
| 	str += `	return value[len(prefix) : idlen-checklen]` + "\n" |  | ||||||
| 	str += `}` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `func getCheckString(prefix string, value string) string {` + "\n" |  | ||||||
| 	str += `	if len(value) != idlen {` + "\n" |  | ||||||
| 	str += `		return ""` + "\n" |  | ||||||
| 	str += `	}` + "\n" |  | ||||||
| 	str += `	return value[idlen-checklen:]` + "\n" |  | ||||||
| 	str += `}` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `func ValidateEntityID(vfl validator.FieldLevel) bool {` + "\n" |  | ||||||
| 	str += `	if !vfl.Field().CanInterface() {` + "\n" |  | ||||||
| 	str += `		log.Error().Msgf("Failed to validate EntityID (cannot interface ?!?)")` + "\n" |  | ||||||
| 	str += `		return false` + "\n" |  | ||||||
| 	str += `	}` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `	ifvalue := vfl.Field().Interface()` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `	if value1, ok := ifvalue.(EntityID); ok {` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `		if vfl.Field().Type().Kind() == reflect.Pointer && langext.IsNil(value1) {` + "\n" |  | ||||||
| 	str += `			return true` + "\n" |  | ||||||
| 	str += `		}` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `		if err := value1.Valid(); err != nil {` + "\n" |  | ||||||
| 	str += `			log.Debug().Msgf("Failed to validate EntityID '%s' (%s)", value1.String(), err.Error())` + "\n" |  | ||||||
| 	str += `			return false` + "\n" |  | ||||||
| 	str += `		} else {` + "\n" |  | ||||||
| 	str += `			return true` + "\n" |  | ||||||
| 	str += `		}` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += `	} else {` + "\n" |  | ||||||
| 	str += `		log.Error().Msgf("Failed to validate EntityID (wrong type: %T)", ifvalue)` + "\n" |  | ||||||
| 	str += `		return false` + "\n" |  | ||||||
| 	str += `	}` + "\n" |  | ||||||
| 	str += `}` + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	for _, iddef := range ids { |  | ||||||
|  |  | ||||||
| 		str += "// ================================ " + iddef.Name + " (" + iddef.FileRelative + ") ================================" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func New" + iddef.Name + "() " + iddef.Name + " {" + "\n" |  | ||||||
| 		str += "	return " + iddef.Name + "(generateID(prefix" + iddef.Name + "))" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (id " + iddef.Name + ") Valid() error {" + "\n" |  | ||||||
| 		str += "	return validateID(prefix" + iddef.Name + ", string(id))" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (i " + iddef.Name + ") String() string {" + "\n" |  | ||||||
| 		str += "	return string(i)" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (i " + iddef.Name + ") Prefix() string {" + "\n" |  | ||||||
| 		str += "	return prefix" + iddef.Name + "" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (id " + iddef.Name + ") Raw() string {" + "\n" |  | ||||||
| 		str += "	return getRawData(prefix" + iddef.Name + ", string(id))" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (id " + iddef.Name + ") CheckString() string {" + "\n" |  | ||||||
| 		str += "	return getCheckString(prefix" + iddef.Name + ", string(id))" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (id " + iddef.Name + ") Regex() rext.Regex {" + "\n" |  | ||||||
| 		str += "	return regex" + iddef.Name + "" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
|  | 	err := templ.Execute(&buffer, langext.H{ | ||||||
|  | 		"PkgName":      pkgname, | ||||||
|  | 		"Checksum":     cs, | ||||||
|  | 		"GoextVersion": goext.GoextVersion, | ||||||
|  | 		"IDs":          ids, | ||||||
|  | 	}) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return str | 	return buffer.String() | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										190
									
								
								bfcodegen/csid-generate.template
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										190
									
								
								bfcodegen/csid-generate.template
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,190 @@ | |||||||
|  | // Code generated by csid-generate.go DO NOT EDIT. | ||||||
|  |  | ||||||
|  | package {{.PkgName}} | ||||||
|  |  | ||||||
|  | import "crypto/rand" | ||||||
|  | import "crypto/sha256" | ||||||
|  | import "fmt" | ||||||
|  | import "github.com/go-playground/validator/v10" | ||||||
|  | import "github.com/rs/zerolog/log" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
|  | import "math/big" | ||||||
|  | import "reflect" | ||||||
|  | import "regexp" | ||||||
|  | import "strings" | ||||||
|  |  | ||||||
|  | const ChecksumCharsetIDGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | ||||||
|  |  | ||||||
|  | const idlen = 24 | ||||||
|  |  | ||||||
|  | const checklen = 1 | ||||||
|  |  | ||||||
|  | const idCharset = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" | ||||||
|  | const idCharsetLen = len(idCharset) | ||||||
|  |  | ||||||
|  | var charSetReverseMap = generateCharsetMap() | ||||||
|  |  | ||||||
|  | const ({{range .IDs}} | ||||||
|  | 	prefix{{.Name}} = "{{.Prefix}}" {{end}} | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | var ({{range .IDs}} | ||||||
|  | 	regex{{.Name}} = generateRegex(prefix{{.Name}}) {{end}} | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func generateRegex(prefix string) rext.Regex { | ||||||
|  | 	return rext.W(regexp.MustCompile(fmt.Sprintf("^%s[%s]{%d}[%s]{%d}$", prefix, idCharset, idlen-len(prefix)-checklen, idCharset, checklen))) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func generateCharsetMap() []int { | ||||||
|  | 	result := make([]int, 128) | ||||||
|  | 	for i := 0; i < len(result); i++ { | ||||||
|  | 		result[i] = -1 | ||||||
|  | 	} | ||||||
|  | 	for idx, chr := range idCharset { | ||||||
|  | 		result[int(chr)] = idx | ||||||
|  | 	} | ||||||
|  | 	return result | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func generateID(prefix string) string { | ||||||
|  | 	k := "" | ||||||
|  | 	csMax := big.NewInt(int64(idCharsetLen)) | ||||||
|  | 	checksum := 0 | ||||||
|  | 	for i := 0; i < idlen-len(prefix)-checklen; i++ { | ||||||
|  | 		v, err := rand.Int(rand.Reader, csMax) | ||||||
|  | 		if err != nil { | ||||||
|  | 			panic(err) | ||||||
|  | 		} | ||||||
|  | 		v64 := v.Int64() | ||||||
|  | 		k += string(idCharset[v64]) | ||||||
|  | 		checksum = (checksum + int(v64)) % (idCharsetLen) | ||||||
|  | 	} | ||||||
|  | 	checkstr := string(idCharset[checksum%idCharsetLen]) | ||||||
|  | 	return prefix + k + checkstr | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func generateIDFromSeed(prefix string, seed string) string { | ||||||
|  | 	h := sha256.New() | ||||||
|  |  | ||||||
|  | 	iddata := "" | ||||||
|  | 	for len(iddata) < idlen-len(prefix)-checklen { | ||||||
|  | 		h.Write([]byte(seed)) | ||||||
|  | 		bs := h.Sum(nil) | ||||||
|  | 		iddata += langext.NewAnyBaseConverter(idCharset).Encode(bs) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	checksum := 0 | ||||||
|  | 	for i := 0; i < idlen-len(prefix)-checklen; i++ { | ||||||
|  | 		ichr := int(iddata[i]) | ||||||
|  | 		checksum = (checksum + charSetReverseMap[ichr]) % (idCharsetLen) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	checkstr := string(idCharset[checksum%idCharsetLen]) | ||||||
|  |  | ||||||
|  | 	return prefix + iddata[:(idlen-len(prefix)-checklen)] + checkstr | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func validateID(prefix string, value string) error { | ||||||
|  | 	if len(value) != idlen { | ||||||
|  | 		return exerr.New(exerr.TypeInvalidCSID, "id has the wrong length").Str("value", value).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if !strings.HasPrefix(value, prefix) { | ||||||
|  | 		return exerr.New(exerr.TypeInvalidCSID, "id is missing the correct prefix").Str("value", value).Str("prefix", prefix).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	checksum := 0 | ||||||
|  | 	for i := len(prefix); i < len(value)-checklen; i++ { | ||||||
|  | 		ichr := int(value[i]) | ||||||
|  | 		if ichr < 0 || ichr >= len(charSetReverseMap) || charSetReverseMap[ichr] == -1 { | ||||||
|  | 			return exerr.New(exerr.TypeInvalidCSID, "id contains invalid characters").Str("value", value).Build() | ||||||
|  | 		} | ||||||
|  | 		checksum = (checksum + charSetReverseMap[ichr]) % (idCharsetLen) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	checkstr := string(idCharset[checksum%idCharsetLen]) | ||||||
|  |  | ||||||
|  | 	if !strings.HasSuffix(value, checkstr) { | ||||||
|  | 		return exerr.New(exerr.TypeInvalidCSID, "id checkstring is invalid").Str("value", value).Str("checkstr", checkstr).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func getRawData(prefix string, value string) string { | ||||||
|  | 	if len(value) != idlen { | ||||||
|  | 		return "" | ||||||
|  | 	} | ||||||
|  | 	return value[len(prefix) : idlen-checklen] | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func getCheckString(prefix string, value string) string { | ||||||
|  | 	if len(value) != idlen { | ||||||
|  | 		return "" | ||||||
|  | 	} | ||||||
|  | 	return value[idlen-checklen:] | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ValidateEntityID(vfl validator.FieldLevel) bool { | ||||||
|  | 	if !vfl.Field().CanInterface() { | ||||||
|  | 		log.Error().Msgf("Failed to validate EntityID (cannot interface ?!?)") | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	ifvalue := vfl.Field().Interface() | ||||||
|  |  | ||||||
|  | 	if value1, ok := ifvalue.(EntityID); ok { | ||||||
|  |  | ||||||
|  | 		if vfl.Field().Type().Kind() == reflect.Pointer && langext.IsNil(value1) { | ||||||
|  | 			return true | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if err := value1.Valid(); err != nil { | ||||||
|  | 			log.Debug().Msgf("Failed to validate EntityID '%s' (%s)", value1.String(), err.Error()) | ||||||
|  | 			return false | ||||||
|  | 		} else { | ||||||
|  | 			return true | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  | 		log.Error().Msgf("Failed to validate EntityID (wrong type: %T)", ifvalue) | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{range .IDs}} | ||||||
|  |  | ||||||
|  | // ================================ {{.Name}} ({{.FileRelative}}) ================================ | ||||||
|  |  | ||||||
|  | func New{{.Name}}() {{.Name}} { | ||||||
|  | 	return {{.Name}}(generateID(prefix{{.Name}})) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (id {{.Name}}) Valid() error { | ||||||
|  | 	return validateID(prefix{{.Name}}, string(id)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) String() string { | ||||||
|  | 	return string(i) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) Prefix() string { | ||||||
|  | 	return prefix{{.Name}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (id {{.Name}}) Raw() string { | ||||||
|  | 	return getRawData(prefix{{.Name}}, string(id)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (id {{.Name}}) CheckString() string { | ||||||
|  | 	return getCheckString(prefix{{.Name}}, string(id)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (id {{.Name}}) Regex() rext.Regex { | ||||||
|  | 	return regex{{.Name}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{end}} | ||||||
							
								
								
									
										52
									
								
								bfcodegen/csid-generate_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										52
									
								
								bfcodegen/csid-generate_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,52 @@ | |||||||
|  | package bfcodegen | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	_ "embed" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"os" | ||||||
|  | 	"path/filepath" | ||||||
|  | 	"testing" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | //go:embed _test_example.tgz | ||||||
|  | var CSIDExampleModels []byte | ||||||
|  |  | ||||||
|  | func TestGenerateCSIDSpecs(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz") | ||||||
|  |  | ||||||
|  | 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | ||||||
|  |  | ||||||
|  | 	err := os.WriteFile(tmpFile, CSIDExampleModels, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	err = os.Mkdir(tmpDir, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go") | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go") | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/csid_gen.go"))(t))) | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | } | ||||||
| @@ -1,10 +1,12 @@ | |||||||
| package bfcodegen | package bfcodegen | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	_ "embed" | ||||||
| 	"errors" | 	"errors" | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  | 	"go/format" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext" | 	"gogs.mikescher.com/BlackForestBytes/goext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
| @@ -14,7 +16,7 @@ import ( | |||||||
| 	"path/filepath" | 	"path/filepath" | ||||||
| 	"regexp" | 	"regexp" | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"time" | 	"text/template" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type EnumDefVal struct { | type EnumDefVal struct { | ||||||
| @@ -39,6 +41,9 @@ var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+( | |||||||
|  |  | ||||||
| var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | ||||||
|  |  | ||||||
|  | //go:embed enum-generate.template | ||||||
|  | var templateEnumGenerateText string | ||||||
|  |  | ||||||
| func GenerateEnumSpecs(sourceDir string, destFile string) error { | func GenerateEnumSpecs(sourceDir string, destFile string) error { | ||||||
|  |  | ||||||
| 	files, err := os.ReadDir(sourceDir) | 	files, err := os.ReadDir(sourceDir) | ||||||
| @@ -104,25 +109,16 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 		return errors.New("no package name found in any file") | 		return errors.New("no package name found in any file") | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	err = os.WriteFile(destFile, []byte(fmtEnumOutput(newChecksum, allEnums, pkgname)), 0o755) | 	fdata, err := format.Source([]byte(fmtEnumOutput(newChecksum, allEnums, pkgname))) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second)) | 	err = os.WriteFile(destFile, fdata, 0o755) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if res.CommandTimedOut { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt timed out") |  | ||||||
| 	} |  | ||||||
| 	if res.ExitCode != 0 { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt did not succeed") |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -204,133 +200,32 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) { | |||||||
| } | } | ||||||
|  |  | ||||||
| func fmtEnumOutput(cs string, enums []EnumDef, pkgname string) string { | func fmtEnumOutput(cs string, enums []EnumDef, pkgname string) string { | ||||||
| 	str := "// Code generated by enum-generate.go DO NOT EDIT.\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += "package " + pkgname + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	str += "import \"gogs.mikescher.com/BlackForestBytes/goext/langext\"" + "\n" | 	templ := template.New("enum-generate") | ||||||
| 	str += "import \"gogs.mikescher.com/BlackForestBytes/goext/enums\"" + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	str += "const ChecksumEnumGenerator = \"" + cs + "\" // GoExtVersion: " + goext.GoextVersion + "\n" | 	templ = templ.Funcs(template.FuncMap{ | ||||||
| 	str += "\n" | 		"boolToStr": func(b bool) string { return langext.Conditional(b, "true", "false") }, | ||||||
|  | 		"deref":     func(v *string) string { return *v }, | ||||||
|  | 		"trimSpace": func(str string) string { return strings.TrimSpace(str) }, | ||||||
|  | 		"hasStr":    func(v EnumDef) bool { return v.Type == "string" }, | ||||||
|  | 		"hasDescr": func(v EnumDef) bool { | ||||||
|  | 			return langext.ArrAll(v.Values, func(val EnumDefVal) bool { return val.Description != nil }) | ||||||
|  | 		}, | ||||||
|  | 	}) | ||||||
|  |  | ||||||
| 	for _, enumdef := range enums { | 	templ = template.Must(templ.Parse(templateEnumGenerateText)) | ||||||
|  |  | ||||||
| 		hasDescr := langext.ArrAll(enumdef.Values, func(val EnumDefVal) bool { return val.Description != nil }) | 	buffer := bytes.Buffer{} | ||||||
| 		hasStr := enumdef.Type == "string" |  | ||||||
|  |  | ||||||
| 		str += "// ================================ " + enumdef.EnumTypeName + " ================================" + "\n" |  | ||||||
| 		str += "//" + "\n" |  | ||||||
| 		str += "// File:       " + enumdef.FileRelative + "\n" |  | ||||||
| 		str += "// StringEnum: " + langext.Conditional(hasStr, "true", "false") + "\n" |  | ||||||
| 		str += "// DescrEnum:  " + langext.Conditional(hasDescr, "true", "false") + "\n" |  | ||||||
| 		str += "//" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "var __" + enumdef.EnumTypeName + "Values = []" + enumdef.EnumTypeName + "{" + "\n" |  | ||||||
| 		for _, v := range enumdef.Values { |  | ||||||
| 			str += "    " + v.VarName + "," + "\n" |  | ||||||
| 		} |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		if hasDescr { |  | ||||||
| 			str += "var __" + enumdef.EnumTypeName + "Descriptions = map[" + enumdef.EnumTypeName + "]string{" + "\n" |  | ||||||
| 			for _, v := range enumdef.Values { |  | ||||||
| 				str += "    " + v.VarName + ": \"" + strings.TrimSpace(*v.Description) + "\"," + "\n" |  | ||||||
| 			} |  | ||||||
| 			str += "}" + "\n" |  | ||||||
| 			str += "" + "\n" |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		str += "var __" + enumdef.EnumTypeName + "Varnames = map[" + enumdef.EnumTypeName + "]string{" + "\n" |  | ||||||
| 		for _, v := range enumdef.Values { |  | ||||||
| 			str += "    " + v.VarName + ": \"" + v.VarName + "\"," + "\n" |  | ||||||
| 		} |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") Valid() bool {" + "\n" |  | ||||||
| 		str += "    return langext.InArray(e, __" + enumdef.EnumTypeName + "Values)" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") Values() []" + enumdef.EnumTypeName + " {" + "\n" |  | ||||||
| 		str += "    return __" + enumdef.EnumTypeName + "Values" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") ValuesAny() []any {" + "\n" |  | ||||||
| 		str += "    return langext.ArrCastToAny(__" + enumdef.EnumTypeName + "Values)" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") ValuesMeta() []enums.EnumMetaValue {" + "\n" |  | ||||||
| 		str += "    return " + enumdef.EnumTypeName + "ValuesMeta()" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		if hasStr { |  | ||||||
| 			str += "func (e " + enumdef.EnumTypeName + ") String() string {" + "\n" |  | ||||||
| 			str += "    return string(e)" + "\n" |  | ||||||
| 			str += "}" + "\n" |  | ||||||
| 			str += "" + "\n" |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		if hasDescr { |  | ||||||
| 			str += "func (e " + enumdef.EnumTypeName + ") Description() string {" + "\n" |  | ||||||
| 			str += "    if d, ok := __" + enumdef.EnumTypeName + "Descriptions[e]; ok {" + "\n" |  | ||||||
| 			str += "        return d" + "\n" |  | ||||||
| 			str += "    }" + "\n" |  | ||||||
| 			str += "    return \"\"" + "\n" |  | ||||||
| 			str += "}" + "\n" |  | ||||||
| 			str += "" + "\n" |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") VarName() string {" + "\n" |  | ||||||
| 		str += "    if d, ok := __" + enumdef.EnumTypeName + "Varnames[e]; ok {" + "\n" |  | ||||||
| 		str += "        return d" + "\n" |  | ||||||
| 		str += "    }" + "\n" |  | ||||||
| 		str += "    return \"\"" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") Meta() enums.EnumMetaValue {" + "\n" |  | ||||||
| 		if hasDescr { |  | ||||||
| 			str += "    return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())}" |  | ||||||
| 		} else { |  | ||||||
| 			str += "    return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil}" |  | ||||||
| 		} |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func Parse" + enumdef.EnumTypeName + "(vv string) (" + enumdef.EnumTypeName + ", bool) {" + "\n" |  | ||||||
| 		str += "    for _, ev := range __" + enumdef.EnumTypeName + "Values {" + "\n" |  | ||||||
| 		str += "        if string(ev) == vv {" + "\n" |  | ||||||
| 		str += "            return ev, true" + "\n" |  | ||||||
| 		str += "        }" + "\n" |  | ||||||
| 		str += "    }" + "\n" |  | ||||||
| 		str += "    return \"\", false" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func " + enumdef.EnumTypeName + "Values() []" + enumdef.EnumTypeName + " {" + "\n" |  | ||||||
| 		str += "    return __" + enumdef.EnumTypeName + "Values" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func " + enumdef.EnumTypeName + "ValuesMeta() []enums.EnumMetaValue {" + "\n" |  | ||||||
| 		str += "    return []enums.EnumMetaValue{" + "\n" |  | ||||||
| 		for _, v := range enumdef.Values { |  | ||||||
| 			str += "        " + v.VarName + ".Meta(),\n" |  | ||||||
| 		} |  | ||||||
| 		str += "    }" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
|  | 	err := templ.Execute(&buffer, langext.H{ | ||||||
|  | 		"PkgName":      pkgname, | ||||||
|  | 		"Checksum":     cs, | ||||||
|  | 		"GoextVersion": goext.GoextVersion, | ||||||
|  | 		"Enums":        enums, | ||||||
|  | 	}) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return str | 	return buffer.String() | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										97
									
								
								bfcodegen/enum-generate.template
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										97
									
								
								bfcodegen/enum-generate.template
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,97 @@ | |||||||
|  | // Code generated by enum-generate.go DO NOT EDIT. | ||||||
|  |  | ||||||
|  | package {{.PkgName}} | ||||||
|  |  | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/enums" | ||||||
|  |  | ||||||
|  | const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | ||||||
|  |  | ||||||
|  | {{range .Enums}} | ||||||
|  |  | ||||||
|  | {{ $hasStr   := ( . | hasStr   ) }} | ||||||
|  | {{ $hasDescr := ( . | hasDescr ) }} | ||||||
|  |  | ||||||
|  | // ================================ {{.EnumTypeName}} ================================ | ||||||
|  | // | ||||||
|  | // File:       {{.FileRelative}} | ||||||
|  | // StringEnum: {{$hasStr   | boolToStr}} | ||||||
|  | // DescrEnum:  {{$hasDescr | boolToStr}} | ||||||
|  | // | ||||||
|  |  | ||||||
|  | var __{{.EnumTypeName}}Values = []{{.EnumTypeName}}{ {{range .Values}} | ||||||
|  | 	{{.VarName}}, {{end}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{if $hasDescr}} | ||||||
|  | var __{{.EnumTypeName}}Descriptions = map[{{.EnumTypeName}}]string{ {{range .Values}} | ||||||
|  | 	{{.VarName}}: "{{.Description | deref | trimSpace}}", {{end}} | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | var __{{.EnumTypeName}}Varnames = map[{{.EnumTypeName}}]string{ {{range .Values}} | ||||||
|  | 	{{.VarName}}: "{{.VarName}}", {{end}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) Valid() bool { | ||||||
|  | 	return langext.InArray(e, __{{.EnumTypeName}}Values) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) Values() []{{.EnumTypeName}} { | ||||||
|  | 	return __{{.EnumTypeName}}Values | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) ValuesAny() []any { | ||||||
|  | 	return langext.ArrCastToAny(__{{.EnumTypeName}}Values) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) ValuesMeta() []enums.EnumMetaValue { | ||||||
|  | 	return {{.EnumTypeName}}ValuesMeta() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{if $hasStr}} | ||||||
|  | func (e {{.EnumTypeName}}) String() string { | ||||||
|  | 	return string(e) | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | {{if $hasDescr}} | ||||||
|  | func (e {{.EnumTypeName}}) Description() string { | ||||||
|  | 	if d, ok := __{{.EnumTypeName}}Descriptions[e]; ok { | ||||||
|  | 		return d | ||||||
|  | 	} | ||||||
|  | 	return "" | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) VarName() string { | ||||||
|  | 	if d, ok := __{{.EnumTypeName}}Varnames[e]; ok { | ||||||
|  | 		return d | ||||||
|  | 	} | ||||||
|  | 	return "" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue { | ||||||
|  |     {{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func Parse{{.EnumTypeName}}(vv string) ({{.EnumTypeName}}, bool) { | ||||||
|  | 	for _, ev := range __{{.EnumTypeName}}Values { | ||||||
|  | 		if string(ev) == vv { | ||||||
|  | 			return ev, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return "", false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func {{.EnumTypeName}}Values() []{{.EnumTypeName}} { | ||||||
|  | 	return __{{.EnumTypeName}}Values | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func {{.EnumTypeName}}ValuesMeta() []enums.EnumMetaValue { | ||||||
|  | 	return []enums.EnumMetaValue{ {{range .Values}} | ||||||
|  |             {{.VarName}}.Meta(), {{end}} | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{end}} | ||||||
| @@ -2,6 +2,7 @@ package bfcodegen | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	_ "embed" | 	_ "embed" | ||||||
|  | 	"fmt" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" | 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
| @@ -12,7 +13,7 @@ import ( | |||||||
| ) | ) | ||||||
|  |  | ||||||
| //go:embed _test_example.tgz | //go:embed _test_example.tgz | ||||||
| var ExampleModels []byte | var EnumExampleModels []byte | ||||||
|  |  | ||||||
| func TestGenerateEnumSpecs(t *testing.T) { | func TestGenerateEnumSpecs(t *testing.T) { | ||||||
|  |  | ||||||
| @@ -20,7 +21,7 @@ func TestGenerateEnumSpecs(t *testing.T) { | |||||||
|  |  | ||||||
| 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | ||||||
|  |  | ||||||
| 	err := os.WriteFile(tmpFile, ExampleModels, 0o777) | 	err := os.WriteFile(tmpFile, EnumExampleModels, 0o777) | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
| 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | ||||||
| @@ -39,4 +40,13 @@ func TestGenerateEnumSpecs(t *testing.T) { | |||||||
| 	err = GenerateEnumSpecs(tmpDir, tmpDir+"/enums_gen.go") | 	err = GenerateEnumSpecs(tmpDir, tmpDir+"/enums_gen.go") | ||||||
| 	tst.AssertNoErr(t, err) | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/enums_gen.go"))(t))) | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,10 +1,12 @@ | |||||||
| package bfcodegen | package bfcodegen | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	_ "embed" | ||||||
| 	"errors" | 	"errors" | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  | 	"go/format" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext" | 	"gogs.mikescher.com/BlackForestBytes/goext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
| @@ -14,7 +16,7 @@ import ( | |||||||
| 	"path/filepath" | 	"path/filepath" | ||||||
| 	"regexp" | 	"regexp" | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"time" | 	"text/template" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type IDDef struct { | type IDDef struct { | ||||||
| @@ -29,6 +31,9 @@ var rexIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+s | |||||||
|  |  | ||||||
| var rexIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | var rexIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | ||||||
|  |  | ||||||
|  | //go:embed id-generate.template | ||||||
|  | var templateIDGenerateText string | ||||||
|  |  | ||||||
| func GenerateIDSpecs(sourceDir string, destFile string) error { | func GenerateIDSpecs(sourceDir string, destFile string) error { | ||||||
|  |  | ||||||
| 	files, err := os.ReadDir(sourceDir) | 	files, err := os.ReadDir(sourceDir) | ||||||
| @@ -94,25 +99,16 @@ func GenerateIDSpecs(sourceDir string, destFile string) error { | |||||||
| 		return errors.New("no package name found in any file") | 		return errors.New("no package name found in any file") | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	err = os.WriteFile(destFile, []byte(fmtIDOutput(newChecksum, allIDs, pkgname)), 0o755) | 	fdata, err := format.Source([]byte(fmtIDOutput(newChecksum, allIDs, pkgname))) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second)) | 	err = os.WriteFile(destFile, fdata, 0o755) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if res.CommandTimedOut { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt timed out") |  | ||||||
| 	} |  | ||||||
| 	if res.ExitCode != 0 { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt did not succeed") |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -166,71 +162,22 @@ func processIDFile(basedir string, fn string) ([]IDDef, string, error) { | |||||||
| } | } | ||||||
|  |  | ||||||
| func fmtIDOutput(cs string, ids []IDDef, pkgname string) string { | func fmtIDOutput(cs string, ids []IDDef, pkgname string) string { | ||||||
| 	str := "// Code generated by id-generate.go DO NOT EDIT.\n" | 	templ := template.Must(template.New("id-generate").Parse(templateIDGenerateText)) | ||||||
| 	str += "\n" |  | ||||||
| 	str += "package " + pkgname + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	str += "import \"go.mongodb.org/mongo-driver/bson\"" + "\n" | 	buffer := bytes.Buffer{} | ||||||
| 	str += "import \"go.mongodb.org/mongo-driver/bson/bsontype\"" + "\n" |  | ||||||
| 	str += "import \"go.mongodb.org/mongo-driver/bson/primitive\"" + "\n" |  | ||||||
| 	str += "import \"gogs.mikescher.com/BlackForestBytes/goext/exerr\"" + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	str += "const ChecksumIDGenerator = \"" + cs + "\" // GoExtVersion: " + goext.GoextVersion + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	anyDef := langext.ArrFirstOrNil(ids, func(def IDDef) bool { return def.Name == "AnyID" || def.Name == "AnyId" }) | 	anyDef := langext.ArrFirstOrNil(ids, func(def IDDef) bool { return def.Name == "AnyID" || def.Name == "AnyId" }) | ||||||
|  |  | ||||||
| 	for _, iddef := range ids { | 	err := templ.Execute(&buffer, langext.H{ | ||||||
|  | 		"PkgName":      pkgname, | ||||||
| 		str += "// ================================ " + iddef.Name + " (" + iddef.FileRelative + ") ================================" + "\n" | 		"Checksum":     cs, | ||||||
| 		str += "" + "\n" | 		"GoextVersion": goext.GoextVersion, | ||||||
|  | 		"IDs":          ids, | ||||||
| 		str += "func (i " + iddef.Name + ") MarshalBSONValue() (bsontype.Type, []byte, error) {" + "\n" | 		"AnyDef":       anyDef, | ||||||
| 		str += "	if objId, err := primitive.ObjectIDFromHex(string(i)); err == nil {" + "\n" | 	}) | ||||||
| 		str += "		return bson.MarshalValue(objId)" + "\n" | 	if err != nil { | ||||||
| 		str += "	} else {" + "\n" | 		panic(err) | ||||||
| 		str += "		return 0, nil, exerr.New(exerr.TypeMarshalEntityID, \"Failed to marshal " + iddef.Name + "(\"+i.String()+\") to ObjectId\").Str(\"value\", string(i)).Type(\"type\", i).Build()" + "\n" |  | ||||||
| 		str += "	}" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (i " + iddef.Name + ") String() string {" + "\n" |  | ||||||
| 		str += "	return string(i)" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (i " + iddef.Name + ") ObjID() (primitive.ObjectID, error) {" + "\n" |  | ||||||
| 		str += "	return primitive.ObjectIDFromHex(string(i))" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (i " + iddef.Name + ") Valid() bool {" + "\n" |  | ||||||
| 		str += "	_, err := primitive.ObjectIDFromHex(string(i))" + "\n" |  | ||||||
| 		str += "	return err == nil" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		if anyDef != nil { |  | ||||||
| 			str += "func (i " + iddef.Name + ") AsAny() " + anyDef.Name + " {" + "\n" |  | ||||||
| 			str += "	return " + anyDef.Name + "(i)" + "\n" |  | ||||||
| 			str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 			str += "" + "\n" |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		str += "func New" + iddef.Name + "() " + iddef.Name + " {" + "\n" |  | ||||||
| 		str += "	return " + iddef.Name + "(primitive.NewObjectID().Hex())" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return str | 	return buffer.String() | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										47
									
								
								bfcodegen/id-generate.template
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										47
									
								
								bfcodegen/id-generate.template
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,47 @@ | |||||||
|  | // Code generated by id-generate.go DO NOT EDIT. | ||||||
|  |  | ||||||
|  | package {{.PkgName}} | ||||||
|  |  | ||||||
|  | import "go.mongodb.org/mongo-driver/bson" | ||||||
|  | import "go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | import "go.mongodb.org/mongo-driver/bson/primitive" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  |  | ||||||
|  | const ChecksumIDGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | ||||||
|  |  | ||||||
|  | {{range .IDs}} | ||||||
|  |  | ||||||
|  | // ================================ {{.Name}} ({{.FileRelative}}) ================================ | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	if objId, err := primitive.ObjectIDFromHex(string(i)); err == nil { | ||||||
|  | 		return bson.MarshalValue(objId) | ||||||
|  | 	} else { | ||||||
|  | 		return 0, nil, exerr.New(exerr.TypeMarshalEntityID, "Failed to marshal {{.Name}}("+i.String()+") to ObjectId").Str("value", string(i)).Type("type", i).Build() | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) String() string { | ||||||
|  | 	return string(i) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) ObjID() (primitive.ObjectID, error) { | ||||||
|  | 	return primitive.ObjectIDFromHex(string(i)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) Valid() bool { | ||||||
|  | 	_, err := primitive.ObjectIDFromHex(string(i)) | ||||||
|  | 	return err == nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{if ne $.AnyDef nil}} | ||||||
|  | func (i {{.Name}}) AsAny() {{$.AnyDef.Name}} { | ||||||
|  | 	return {{$.AnyDef.Name}}(i) | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | func New{{.Name}}() {{.Name}} { | ||||||
|  | 	return {{.Name}}(primitive.NewObjectID().Hex()) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{end}} | ||||||
							
								
								
									
										52
									
								
								bfcodegen/id-generate_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										52
									
								
								bfcodegen/id-generate_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,52 @@ | |||||||
|  | package bfcodegen | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	_ "embed" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"os" | ||||||
|  | 	"path/filepath" | ||||||
|  | 	"testing" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | //go:embed _test_example.tgz | ||||||
|  | var IDExampleModels []byte | ||||||
|  |  | ||||||
|  | func TestGenerateIDSpecs(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz") | ||||||
|  |  | ||||||
|  | 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | ||||||
|  |  | ||||||
|  | 	err := os.WriteFile(tmpFile, IDExampleModels, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	err = os.Mkdir(tmpDir, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go") | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go") | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/id_gen.go"))(t))) | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | } | ||||||
| @@ -66,7 +66,6 @@ func (ph PassHash) Data() (_version int, _seed []byte, _payload []byte, _totp bo | |||||||
| 		return int(version), nil, payload, false, nil, true | 		return int(version), nil, payload, false, nil, true | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	// |  | ||||||
| 	if version == 2 { | 	if version == 2 { | ||||||
| 		if len(split) != 3 { | 		if len(split) != 3 { | ||||||
| 			return -1, nil, nil, false, nil, false | 			return -1, nil, nil, false, nil, false | ||||||
|   | |||||||
| @@ -4,6 +4,10 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | type RawFilter interface { | ||||||
|  | 	FilterQuery() mongo.Pipeline | ||||||
|  | } | ||||||
|  |  | ||||||
| type Filter interface { | type Filter interface { | ||||||
| 	FilterQuery() mongo.Pipeline | 	FilterQuery() mongo.Pipeline | ||||||
| 	Pagination() (string, SortDirection, string, SortDirection) | 	Pagination() (string, SortDirection, string, SortDirection) | ||||||
|   | |||||||
| @@ -275,7 +275,7 @@ func (b *Builder) Any(key string, val any) *Builder { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (b *Builder) Stringer(key string, val fmt.Stringer) *Builder { | func (b *Builder) Stringer(key string, val fmt.Stringer) *Builder { | ||||||
| 	if val == nil { | 	if langext.IsNil(val) { | ||||||
| 		return b.addMeta(key, MDTString, "(!nil)") | 		return b.addMeta(key, MDTString, "(!nil)") | ||||||
| 	} else { | 	} else { | ||||||
| 		return b.addMeta(key, MDTString, val.String()) | 		return b.addMeta(key, MDTString, val.String()) | ||||||
|   | |||||||
| @@ -1,12 +1,15 @@ | |||||||
| package ginext | package ginext | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"bytes" | ||||||
| 	"context" | 	"context" | ||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
| 	"github.com/gin-gonic/gin/binding" | 	"github.com/gin-gonic/gin/binding" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/dataext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"io" | ||||||
| 	"runtime/debug" | 	"runtime/debug" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -17,6 +20,7 @@ type PreContext struct { | |||||||
| 	uri     any | 	uri     any | ||||||
| 	query   any | 	query   any | ||||||
| 	body    any | 	body    any | ||||||
|  | 	rawbody *[]byte | ||||||
| 	form    any | 	form    any | ||||||
| 	header  any | 	header  any | ||||||
| 	timeout *time.Duration | 	timeout *time.Duration | ||||||
| @@ -37,6 +41,11 @@ func (pctx *PreContext) Body(body any) *PreContext { | |||||||
| 	return pctx | 	return pctx | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (pctx *PreContext) RawBody(rawbody *[]byte) *PreContext { | ||||||
|  | 	pctx.rawbody = rawbody | ||||||
|  | 	return pctx | ||||||
|  | } | ||||||
|  |  | ||||||
| func (pctx *PreContext) Form(form any) *PreContext { | func (pctx *PreContext) Form(form any) *PreContext { | ||||||
| 	pctx.form = form | 	pctx.form = form | ||||||
| 	return pctx | 	return pctx | ||||||
| @@ -90,6 +99,23 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | |||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	if pctx.rawbody != nil { | ||||||
|  | 		if brc, ok := pctx.ginCtx.Request.Body.(dataext.BufferedReadCloser); ok { | ||||||
|  | 			v, err := brc.BufferedAll() | ||||||
|  | 			if err != nil { | ||||||
|  | 				return nil, nil, langext.Ptr(Error(err)) | ||||||
|  | 			} | ||||||
|  | 			*pctx.rawbody = v | ||||||
|  | 		} else { | ||||||
|  | 			buf := &bytes.Buffer{} | ||||||
|  | 			_, err := io.Copy(buf, pctx.ginCtx.Request.Body) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return nil, nil, langext.Ptr(Error(err)) | ||||||
|  | 			} | ||||||
|  | 			*pctx.rawbody = buf.Bytes() | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	if pctx.form != nil { | 	if pctx.form != nil { | ||||||
| 		if pctx.ginCtx.ContentType() == "multipart/form-data" { | 		if pctx.ginCtx.ContentType() == "multipart/form-data" { | ||||||
| 			if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil { | 			if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil { | ||||||
|   | |||||||
							
								
								
									
										22
									
								
								go.mod
									
									
									
									
									
								
							
							
						
						
									
										22
									
								
								go.mod
									
									
									
									
									
								
							| @@ -7,26 +7,26 @@ require ( | |||||||
| 	github.com/jmoiron/sqlx v1.3.5 | 	github.com/jmoiron/sqlx v1.3.5 | ||||||
| 	github.com/rs/xid v1.5.0 | 	github.com/rs/xid v1.5.0 | ||||||
| 	github.com/rs/zerolog v1.31.0 | 	github.com/rs/zerolog v1.31.0 | ||||||
| 	go.mongodb.org/mongo-driver v1.12.1 | 	go.mongodb.org/mongo-driver v1.13.0 | ||||||
| 	golang.org/x/crypto v0.14.0 | 	golang.org/x/crypto v0.15.0 | ||||||
| 	golang.org/x/sys v0.13.0 | 	golang.org/x/sys v0.14.0 | ||||||
| 	golang.org/x/term v0.13.0 | 	golang.org/x/term v0.14.0 | ||||||
| ) | ) | ||||||
|  |  | ||||||
| require ( | require ( | ||||||
| 	github.com/bytedance/sonic v1.10.2 // indirect | 	github.com/bytedance/sonic v1.10.2 // indirect | ||||||
| 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect | 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect | ||||||
| 	github.com/chenzhuoyu/iasm v0.9.0 // indirect | 	github.com/chenzhuoyu/iasm v0.9.1 // indirect | ||||||
| 	github.com/gabriel-vasile/mimetype v1.4.3 // indirect | 	github.com/gabriel-vasile/mimetype v1.4.3 // indirect | ||||||
| 	github.com/gin-contrib/sse v0.1.0 // indirect | 	github.com/gin-contrib/sse v0.1.0 // indirect | ||||||
| 	github.com/go-playground/locales v0.14.1 // indirect | 	github.com/go-playground/locales v0.14.1 // indirect | ||||||
| 	github.com/go-playground/universal-translator v0.18.1 // indirect | 	github.com/go-playground/universal-translator v0.18.1 // indirect | ||||||
| 	github.com/go-playground/validator/v10 v10.15.5 // indirect | 	github.com/go-playground/validator/v10 v10.16.0 // indirect | ||||||
| 	github.com/goccy/go-json v0.10.2 // indirect | 	github.com/goccy/go-json v0.10.2 // indirect | ||||||
| 	github.com/golang/snappy v0.0.4 // indirect | 	github.com/golang/snappy v0.0.4 // indirect | ||||||
| 	github.com/json-iterator/go v1.1.12 // indirect | 	github.com/json-iterator/go v1.1.12 // indirect | ||||||
| 	github.com/klauspost/compress v1.17.2 // indirect | 	github.com/klauspost/compress v1.17.2 // indirect | ||||||
| 	github.com/klauspost/cpuid/v2 v2.2.5 // indirect | 	github.com/klauspost/cpuid/v2 v2.2.6 // indirect | ||||||
| 	github.com/leodido/go-urn v1.2.4 // indirect | 	github.com/leodido/go-urn v1.2.4 // indirect | ||||||
| 	github.com/mattn/go-colorable v0.1.13 // indirect | 	github.com/mattn/go-colorable v0.1.13 // indirect | ||||||
| 	github.com/mattn/go-isatty v0.0.20 // indirect | 	github.com/mattn/go-isatty v0.0.20 // indirect | ||||||
| @@ -40,10 +40,10 @@ require ( | |||||||
| 	github.com/xdg-go/scram v1.1.2 // indirect | 	github.com/xdg-go/scram v1.1.2 // indirect | ||||||
| 	github.com/xdg-go/stringprep v1.0.4 // indirect | 	github.com/xdg-go/stringprep v1.0.4 // indirect | ||||||
| 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect | 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect | ||||||
| 	golang.org/x/arch v0.5.0 // indirect | 	golang.org/x/arch v0.6.0 // indirect | ||||||
| 	golang.org/x/net v0.17.0 // indirect | 	golang.org/x/net v0.18.0 // indirect | ||||||
| 	golang.org/x/sync v0.4.0 // indirect | 	golang.org/x/sync v0.5.0 // indirect | ||||||
| 	golang.org/x/text v0.13.0 // indirect | 	golang.org/x/text v0.14.0 // indirect | ||||||
| 	google.golang.org/protobuf v1.31.0 // indirect | 	google.golang.org/protobuf v1.31.0 // indirect | ||||||
| 	gopkg.in/yaml.v3 v3.0.1 // indirect | 	gopkg.in/yaml.v3 v3.0.1 // indirect | ||||||
| ) | ) | ||||||
|   | |||||||
							
								
								
									
										22
									
								
								go.sum
									
									
									
									
									
								
							
							
						
						
									
										22
									
								
								go.sum
									
									
									
									
									
								
							| @@ -8,6 +8,8 @@ github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ | |||||||
| github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA= | github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA= | ||||||
| github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo= | github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo= | ||||||
| github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | ||||||
|  | github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0= | ||||||
|  | github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | ||||||
| github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= | github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= | ||||||
| github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | ||||||
| github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= | github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= | ||||||
| @@ -27,6 +29,8 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn | |||||||
| github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= | github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= | ||||||
| github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24= | github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24= | ||||||
| github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||||
|  | github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE= | ||||||
|  | github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||||
| github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | ||||||
| github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | ||||||
| github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | ||||||
| @@ -54,6 +58,8 @@ github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQs | |||||||
| github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= | github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||||
|  | github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= | ||||||
|  | github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||||
| github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= | github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= | ||||||
| github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= | github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= | ||||||
| github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= | github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= | ||||||
| @@ -112,15 +118,21 @@ github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/ | |||||||
| github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | ||||||
| go.mongodb.org/mongo-driver v1.12.1 h1:nLkghSU8fQNaK7oUmDhQFsnrtcoNy7Z6LVFKsEecqgE= | go.mongodb.org/mongo-driver v1.12.1 h1:nLkghSU8fQNaK7oUmDhQFsnrtcoNy7Z6LVFKsEecqgE= | ||||||
| go.mongodb.org/mongo-driver v1.12.1/go.mod h1:/rGBTebI3XYboVmgz+Wv3Bcbl3aD0QF9zl6kDDw18rQ= | go.mongodb.org/mongo-driver v1.12.1/go.mod h1:/rGBTebI3XYboVmgz+Wv3Bcbl3aD0QF9zl6kDDw18rQ= | ||||||
|  | go.mongodb.org/mongo-driver v1.13.0 h1:67DgFFjYOCMWdtTEmKFpV3ffWlFnh+CYZ8ZS/tXWUfY= | ||||||
|  | go.mongodb.org/mongo-driver v1.13.0/go.mod h1:/rGBTebI3XYboVmgz+Wv3Bcbl3aD0QF9zl6kDDw18rQ= | ||||||
| golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | ||||||
| golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y= | golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y= | ||||||
| golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | ||||||
|  | golang.org/x/arch v0.6.0 h1:S0JTfE48HbRj80+4tbvZDYsJ3tGv6BUU3XxyZ7CirAc= | ||||||
|  | golang.org/x/arch v0.6.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||||
| golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | ||||||
| golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | ||||||
| golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= | golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= | ||||||
| golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= | golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= | ||||||
| golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= | golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= | ||||||
| golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= | golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= | ||||||
|  | golang.org/x/crypto v0.15.0 h1:frVn1TEaCEaZcn3Tmd7Y2b5KKPaZ+I32Q2OA3kYp5TA= | ||||||
|  | golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g= | ||||||
| golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | ||||||
| golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | ||||||
| golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | ||||||
| @@ -131,10 +143,14 @@ golang.org/x/net v0.16.0 h1:7eBu7KsSvFDtSXUIDbh3aqlK4DPsZ1rByC8PFfBThos= | |||||||
| golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= | golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= | ||||||
| golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= | golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= | ||||||
| golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= | golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= | ||||||
|  | golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg= | ||||||
|  | golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ= | ||||||
| golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||||
| golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||||
| golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= | golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= | ||||||
| golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= | golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= | ||||||
|  | golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= | ||||||
|  | golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | ||||||
| golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | ||||||
| golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||||
| golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||||
| @@ -148,10 +164,14 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | |||||||
| golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||||
| golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= | golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= | ||||||
| golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||||
|  | golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= | ||||||
|  | golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
| golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | ||||||
| golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | ||||||
| golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= | golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= | ||||||
| golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= | golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= | ||||||
|  | golang.org/x/term v0.14.0 h1:LGK9IlZ8T9jvdy6cTdfKUCltatMFOehAQo9SRC46UQ8= | ||||||
|  | golang.org/x/term v0.14.0/go.mod h1:TySc+nGkYR6qt8km8wUhuFRTVSMIX3XPR58y2lC8vww= | ||||||
| golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | ||||||
| golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||||
| golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||||
| @@ -160,6 +180,8 @@ golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= | |||||||
| golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= | golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= | ||||||
| golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= | golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= | ||||||
| golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= | golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= | ||||||
|  | golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= | ||||||
|  | golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= | ||||||
| golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= | golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= | ||||||
| golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | ||||||
| golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= | golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| package goext | package goext | ||||||
|  |  | ||||||
| const GoextVersion = "0.0.291" | const GoextVersion = "0.0.305" | ||||||
|  |  | ||||||
| const GoextVersionTimestamp = "2023-10-26T13:02:45+0200" | const GoextVersionTimestamp = "2023-11-09T09:35:56+0100" | ||||||
|   | |||||||
							
								
								
									
										178
									
								
								langext/baseAny.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										178
									
								
								langext/baseAny.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,178 @@ | |||||||
|  | package langext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"crypto/rand" | ||||||
|  | 	"errors" | ||||||
|  | 	"math" | ||||||
|  | 	"math/big" | ||||||
|  | ) | ||||||
|  |  | ||||||
// AnyBaseConverter converts between byte sequences / integers and their
// textual representation in an arbitrary positional base, defined by a
// caller-supplied charset (charset[0] is the zero digit).
type AnyBaseConverter struct {
	base    uint64 // number of distinct digits ( == len(charset) )
	charset []rune // digit alphabet, index == digit value
}

// NewAnyBaseConverter creates a converter for the given charset.
// The base is the rune-count of cs.
func NewAnyBaseConverter(cs string) AnyBaseConverter {
	rcs := []rune(cs)
	return AnyBaseConverter{
		base:    uint64(len(rcs)),
		charset: rcs,
	}
}

// digitIndex returns the digit value of r in the charset, or -1 if r is
// not a valid digit.
func (bc AnyBaseConverter) digitIndex(r rune) int {
	for i, c := range bc.charset {
		if c == r {
			return i
		}
	}
	return -1
}

// Rand returns a random string of rlen digits from the charset, using
// crypto/rand as the entropy source. Panics if the entropy source fails.
//
// NOTE(review): reducing a [0, MaxInt64) sample modulo the base has a
// (tiny) modulo bias for bases that do not divide MaxInt64 — confirm
// whether uniformity matters for callers.
func (bc AnyBaseConverter) Rand(rlen int) string {
	biBase := big.NewInt(int64(bc.base))

	randMax := big.NewInt(math.MaxInt64)

	r := ""

	for i := 0; i < rlen; i++ {
		v, err := rand.Int(rand.Reader, randMax)
		if err != nil {
			panic(err)
		}

		r += string(bc.charset[v.Mod(v, biBase).Int64()])
	}

	return r
}

// EncodeUInt64 renders num in the converter's base, most significant
// digit first.
//
// Fixes over the previous revision: the digit loop divided by the
// unrelated package constant base62Base instead of bc.base, appended
// digits in reverse (least significant first, so DecodeUInt64 could not
// round-trip the result), and returned the literal "0" for zero even
// for charsets that do not contain '0' (e.g. base58).
func (bc AnyBaseConverter) EncodeUInt64(num uint64) string {
	if num == 0 {
		return string(bc.charset[0])
	}

	b := ""

	// peel off the least significant digit each round, but PREPEND it so
	// the most significant digit ends up first (matching DecodeUInt64).
	for num > 0 {
		r := num % bc.base
		num /= bc.base

		b = string(bc.charset[r]) + b
	}

	return b
}

// DecodeUInt64 parses str (most significant digit first) as an unsigned
// integer in the converter's base. Returns an error for an empty string
// or a rune outside the charset.
//
// Fix over the previous revision: the accumulator was multiplied by the
// unrelated package constant base62Base instead of bc.base.
func (bc AnyBaseConverter) DecodeUInt64(str string) (uint64, error) {
	if str == "" {
		return 0, errors.New("empty string")
	}

	result := uint64(0)

	for _, v := range str {
		pos := bc.digitIndex(v)
		if pos == -1 {
			return 0, errors.New("invalid character: " + string(v))
		}

		result = result*bc.base + uint64(pos)
	}

	return result, nil
}

// Encode interprets src as a big-endian unsigned integer and renders it
// in the converter's base (leading zero bytes are not preserved).
func (bc AnyBaseConverter) Encode(src []byte) string {
	value := new(big.Int).SetBytes(src)
	return bc.EncodeBigInt(value)
}

// EncodeBigInt renders src in the converter's base; negative values get
// a leading '-'. src is not modified.
//
// NOTE(review): a zero src yields the empty string (the digit loop never
// runs) — confirm whether callers depend on that.
func (bc AnyBaseConverter) EncodeBigInt(src *big.Int) string {
	value := new(big.Int).Set(src)

	isneg := value.Sign() < 0
	if isneg {
		value.Neg(value)
	}

	biBase := big.NewInt(int64(bc.base))
	rem := new(big.Int)

	answer := ""
	for value.Sign() > 0 {
		value.QuoRem(value, biBase, rem)
		answer = string(bc.charset[rem.Int64()]) + answer
	}

	if isneg {
		return "-" + answer
	}
	return answer
}

// Decode parses src in the converter's base and returns the big-endian
// byte representation of its absolute value.
func (bc AnyBaseConverter) Decode(src string) ([]byte, error) {
	value, err := bc.DecodeToBigInt(src)
	if err != nil {
		return nil, err
	}
	return value.Bytes(), nil
}

// DecodeToBigInt parses _src (optionally prefixed with '+' or '-') in
// the converter's base. Returns an error for an empty string, a base
// below 2, a sign with no digits, or a rune outside the charset.
//
// The previous revision carried an "overflow" guard comparing the
// accumulator against its prior value; big.Int arithmetic cannot
// overflow, so that branch was unreachable and has been removed.
func (bc AnyBaseConverter) DecodeToBigInt(_src string) (*big.Int, error) {
	result := big.NewInt(0)

	src := []rune(_src)

	if len(src) == 0 {
		return nil, errors.New("string is empty")
	}
	if bc.base < 2 {
		return nil, errors.New("not enough digits")
	}

	i := 0

	neg := false
	if src[i] == '+' {
		i++
	} else if src[i] == '-' {
		i++
		neg = true
	}

	if i >= len(src) {
		return nil, errors.New("no digits in input")
	}

	biBase := big.NewInt(int64(bc.base))

	for ; i < len(src); i++ {
		n := bc.digitIndex(src[i])
		if n < 0 {
			return nil, errors.New("invalid characters in input")
		}

		result.Mul(result, biBase)
		result.Add(result, big.NewInt(int64(n)))
	}

	if neg {
		result.Neg(result)
	}

	return result, nil
}
							
								
								
									
										80
									
								
								langext/baseAny_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										80
									
								
								langext/baseAny_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,80 @@ | |||||||
|  | package langext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func _anyEncStr(bc AnyBaseConverter, v string) string { | ||||||
|  | 	vr := bc.Encode([]byte(v)) | ||||||
|  | 	return vr | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func _anyDecStr(bc AnyBaseConverter, v string) string { | ||||||
|  | 	vr, err := bc.Decode(v) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
|  | 	} | ||||||
|  | 	return string(vr) | ||||||
|  | } | ||||||
|  |  | ||||||
// TestAnyBase58DefaultEncoding checks encoding against the standard
// base58 charset with two known fixtures (a short and a long input).
func TestAnyBase58DefaultEncoding(t *testing.T) {
	tst.AssertEqual(t, _anyEncStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "Hello"), "9Ajdvzr")
	tst.AssertEqual(t, _anyEncStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in."), "48638SMcJuah5okqPx4kCVf5d8QAdgbdNf28g7ReY13prUENNbMyssjq5GjsrJHF5zeZfqs4uJMUJHr7VbrU4XBUZ2Fw9DVtqtn9N1eXucEWSEZahXV6w4ysGSWqGdpeYTJf1MdDzTg8vfcQViifJjZX")
}
|  |  | ||||||
// TestAnyBase58DefaultDecoding is the inverse of
// TestAnyBase58DefaultEncoding: the same base58 fixtures decoded back
// to their plaintext.
func TestAnyBase58DefaultDecoding(t *testing.T) {
	tst.AssertEqual(t, _anyDecStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "9Ajdvzr"), "Hello")
	tst.AssertEqual(t, _anyDecStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "48638SMcJuah5okqPx4kCVf5d8QAdgbdNf28g7ReY13prUENNbMyssjq5GjsrJHF5zeZfqs4uJMUJHr7VbrU4XBUZ2Fw9DVtqtn9N1eXucEWSEZahXV6w4ysGSWqGdpeYTJf1MdDzTg8vfcQViifJjZX"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in.")
}
|  |  | ||||||
// TestAnyBaseDecode cross-checks conversions between several charsets:
// for each fixture, decoding FromVal (in FromCS) and ToVal (in ToCS)
// must yield the same byte sequence, and re-encoding each byte sequence
// must reproduce the original textual value.
func TestAnyBaseDecode(t *testing.T) {

	// commonly used digit alphabets, from base2 up to a 256-emoji charset
	const (
		Binary  = "01"
		Decimal = "0123456789"
		Hex     = "0123456789ABCDEF"
		DNA     = "ACGT"
		Base32  = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567"
		Base58  = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
		Base62  = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
		Base64  = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
		Base256 = "🚀🪐☄🛰🌌🌑🌒🌓🌔🌕🌖🌗🌘🌍🌏🌎🐉☀💻🖥💾💿😂❤😍🤣😊🙏💕😭😘👍😅👏😁🔥🥰💔💖💙😢🤔😆🙄💪😉☺👌🤗💜😔😎😇🌹🤦🎉💞✌✨🤷😱😌🌸🙌😋💗💚😏💛🙂💓🤩😄😀🖤😃💯🙈👇🎶😒🤭❣😜💋👀😪😑💥🙋😞😩😡🤪👊🥳😥🤤👉💃😳✋😚😝😴🌟😬🙃🍀🌷😻😓⭐✅🥺🌈😈🤘💦✔😣🏃💐☹🎊💘😠☝😕🌺🎂🌻😐🖕💝🙊😹🗣💫💀👑🎵🤞😛🔴😤🌼😫⚽🤙☕🏆🤫👈😮🙆🍻🍃🐶💁😲🌿🧡🎁⚡🌞🎈❌✊👋😰🤨😶🤝🚶💰🍓💢🤟🙁🚨💨🤬✈🎀🍺🤓😙💟🌱😖👶🥴▶➡❓💎💸⬇😨🌚🦋😷🕺⚠🙅😟😵👎🤲🤠🤧📌🔵💅🧐🐾🍒😗🤑🌊🤯🐷☎💧😯💆👆🎤🙇🍑❄🌴💣🐸💌📍🥀🤢👅💡💩👐📸👻🤐🤮🎼🥵🚩🍎🍊👼💍📣🥂"
	)

	// TestDef: the same number written in two different bases.
	type TestDef struct {
		FromCS  string
		FromVal string
		ToCS    string
		ToVal   string
	}

	defs := []TestDef{
		{Binary, "10100101011100000101010", Decimal, "5421098"},
		{Decimal, "5421098", DNA, "CCAGGTGAAGGG"},
		{Decimal, "5421098", DNA, "CCAGGTGAAGGG"},
		{Decimal, "80085", Base256, "🪐💞🔵"},
		{Hex, "48656C6C6C20576F526C5421", Base64, "SGVsbGwgV29SbFQh"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base32, "CIMVWGY3B7QFO32SNRPZBB"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base58, "2fUsGKQUcgQcwSqpvy6"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base62, "V34nvybdQ3m3RHk9Sr"},
	}

	for _, def := range defs {

		d1 := NewAnyBaseConverter(def.FromCS)
		d2 := NewAnyBaseConverter(def.ToCS)

		// both textual forms must decode to identical bytes
		v1 := tst.Must(d1.Decode(def.FromVal))(t)
		v2 := tst.Must(d2.Decode(def.ToVal))(t)

		tst.AssertArrayEqual(t, v1, v2)

		// and re-encoding must round-trip back to the original strings
		str2 := d2.Encode(v1)
		tst.AssertEqual(t, str2, def.ToVal)

		str1 := d1.Encode(v2)
		tst.AssertEqual(t, str1, def.FromVal)

	}
}
							
								
								
									
										29
									
								
								pagination/filter.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										29
									
								
								pagination/filter.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,29 @@ | |||||||
|  | package pagination | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
|  | 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | ||||||
|  | ) | ||||||
|  |  | ||||||
// Filter describes a paginated mongo query: the aggregation stages to
// filter by, plus the field/direction used for cursor-token pagination.
type Filter interface {
	FilterQuery() mongo.Pipeline
	Pagination() (string, ct.SortDirection)
}

// dynamicFilter is a trivial Filter implementation backed by fixed,
// caller-supplied values (see CreateFilter).
type dynamicFilter struct {
	pipeline  mongo.Pipeline
	sortField string
	sortDir   ct.SortDirection
}

// FilterQuery returns the fixed aggregation pipeline of this filter.
func (d dynamicFilter) FilterQuery() mongo.Pipeline {
	return d.pipeline
}

// Pagination returns the fixed sort field and direction of this filter.
func (d dynamicFilter) Pagination() (string, ct.SortDirection) {
	return d.sortField, d.sortDir
}

// CreateFilter wraps a pipeline and sort specification in a Filter.
func CreateFilter(pipeline mongo.Pipeline, sortField string, sortdir ct.SortDirection) Filter {
	return dynamicFilter{pipeline: pipeline, sortField: sortField, sortDir: sortdir}
}
							
								
								
									
										16
									
								
								pagination/pagination.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										16
									
								
								pagination/pagination.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,16 @@ | |||||||
|  | package pagination | ||||||
|  |  | ||||||
// Pagination is the paging metadata returned alongside a paged listing.
type Pagination struct {
	Page             int `json:"page"`            // page (first page == 1)
	Limit            int `json:"limit"`           // max-page-size
	TotalPages       int `json:"totalPages"`      // total page-count
	TotalItems       int `json:"totalItems"`      // total items-count
	CurrentPageCount int `json:"currntPageCount"` // item-count in current page ( == len(data) ) — NOTE(review): json key "currntPageCount" looks like a typo of "currentPageCount"; it is a wire-format key, so confirm with API consumers before renaming
}
|  |  | ||||||
// CalcPaginationTotalPages returns how many pages are needed to list
// totalItems items with at most limit items per page (i.e.
// ceil(totalItems / limit) in integer arithmetic).
//
// Returns 0 when there is nothing to page, and also when limit is not
// positive — the previous revision divided by limit unguarded and
// panicked with a division-by-zero for limit == 0.
func CalcPaginationTotalPages(totalItems int, limit int) int {
	if totalItems <= 0 || limit <= 0 {
		return 0
	}
	return 1 + (totalItems-1)/limit
}
| @@ -14,6 +14,20 @@ func AssertEqual[T comparable](t *testing.T, actual T, expected T) { | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
// AssertArrayEqual fails the test if the two slices differ in length or
// in any element; it reports the first difference found and returns.
func AssertArrayEqual[T comparable](t *testing.T, actual []T, expected []T) {
	t.Helper()

	if len(actual) != len(expected) {
		t.Errorf("values differ: Actual: '%v', Expected: '%v' (len %d <> %d)", actual, expected, len(actual), len(expected))
		return
	}

	for idx, got := range actual {
		if got != expected[idx] {
			t.Errorf("values differ: Actual: '%v', Expected: '%v' (at index %d)", actual, expected, idx)
			return
		}
	}
}
|  |  | ||||||
| func AssertNotEqual[T comparable](t *testing.T, actual T, expected T) { | func AssertNotEqual[T comparable](t *testing.T, actual T, expected T) { | ||||||
| 	t.Helper() | 	t.Helper() | ||||||
| 	if actual == expected { | 	if actual == expected { | ||||||
|   | |||||||
							
								
								
									
										21
									
								
								tst/must.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										21
									
								
								tst/must.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,21 @@ | |||||||
|  | package tst | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"runtime/debug" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | // Must can b used to AssertNoErr of an (T, err) function | ||||||
|  | // | ||||||
|  | // Usage: | ||||||
|  | // | ||||||
|  | // input := "123.8" | ||||||
|  | // value := tst.Must(strconv.Atoi(input))(t) | ||||||
// Must can be used to assert that a (T, error) returning function
// succeeded: the returned closure marks the test as failed (with a
// stack trace) if the error is non-nil, and returns v either way.
//
// Usage:
//
//	input := "123"
//	value := tst.Must(strconv.Atoi(input))(t)
//
// NOTE(review): uses t.Error, not t.Fatal, so on failure the test keeps
// running with the (likely zero-valued) v — confirm this is intended.
func Must[T any](v T, anerr error) func(t *testing.T) T {
	return func(t *testing.T) T {
		if anerr != nil {
			t.Error("Function returned an error: " + anerr.Error() + "\n" + string(debug.Stack()))
		}
		return v
	}
}
| @@ -44,6 +44,8 @@ type Coll[TData any] struct { | |||||||
| 	implDataTypeMap     map[reflect.Type]map[string]fullTypeRef                  // dynamic list of fields of TData implementations (only if TData is an interface) | 	implDataTypeMap     map[reflect.Type]map[string]fullTypeRef                  // dynamic list of fields of TData implementations (only if TData is an interface) | ||||||
| 	customDecoder       *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface) | 	customDecoder       *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface) | ||||||
| 	isInterfaceDataType bool                                                     // true if TData is an interface (not a struct) | 	isInterfaceDataType bool                                                     // true if TData is an interface (not a struct) | ||||||
|  | 	unmarshalHooks      []func(d TData) TData                                    // called for every object after unmarshalling | ||||||
|  | 	extraModPipeline    mongo.Pipeline                                           // appended to pipelines after filter/limit/skip/sort, used for $lookup, $set, $unset, $project, etc | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Collection() *mongo.Collection { | func (c *Coll[TData]) Collection() *mongo.Collection { | ||||||
| @@ -54,14 +56,6 @@ func (c *Coll[TData]) Name() string { | |||||||
| 	return c.coll.Name() | 	return c.coll.Name() | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) WithDecodeFunc(cdf func(ctx context.Context, dec Decodable) (TData, error), example TData) *Coll[TData] { |  | ||||||
|  |  | ||||||
| 	c.EnsureInitializedReflection(example) |  | ||||||
|  |  | ||||||
| 	c.customDecoder = langext.Ptr(cdf) |  | ||||||
| 	return c |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Indexes() mongo.IndexView { | func (c *Coll[TData]) Indexes() mongo.IndexView { | ||||||
| 	return c.coll.Indexes() | 	return c.coll.Indexes() | ||||||
| } | } | ||||||
| @@ -74,6 +68,26 @@ func (c *Coll[TData]) Drop(ctx context.Context) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) WithDecodeFunc(cdf func(ctx context.Context, dec Decodable) (TData, error), example TData) *Coll[TData] { | ||||||
|  |  | ||||||
|  | 	c.EnsureInitializedReflection(example) | ||||||
|  |  | ||||||
|  | 	c.customDecoder = langext.Ptr(cdf) | ||||||
|  | 	return c | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) WithUnmarshalHook(fn func(d TData) TData) *Coll[TData] { | ||||||
|  | 	c.unmarshalHooks = append(c.unmarshalHooks, fn) | ||||||
|  |  | ||||||
|  | 	return c | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) WithModifyingPipeline(p mongo.Pipeline) *Coll[TData] { | ||||||
|  | 	c.extraModPipeline = append(c.extraModPipeline, p...) | ||||||
|  |  | ||||||
|  | 	return c | ||||||
|  | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) { | func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) { | ||||||
|  |  | ||||||
| 	valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary) | 	valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary) | ||||||
|   | |||||||
| @@ -2,37 +2,39 @@ package wmo | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"context" | 	"context" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) decodeSingle(ctx context.Context, dec Decodable) (TData, error) { | func (c *Coll[TData]) decodeSingle(ctx context.Context, dec Decodable) (TData, error) { | ||||||
| 	if c.customDecoder != nil { |  | ||||||
|  |  | ||||||
| 		res, err := (*c.customDecoder)(ctx, dec) | 	var res TData | ||||||
|  | 	var err error | ||||||
|  |  | ||||||
|  | 	if c.customDecoder != nil { | ||||||
|  | 		res, err = (*c.customDecoder)(ctx, dec) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return *new(TData), exerr.Wrap(err, "failed to decode single entity with custom-decoder").Type("decoder", *c.customDecoder).Build() | 			return *new(TData), exerr.Wrap(err, "failed to decode single entity with custom-decoder").Type("decoder", *c.customDecoder).Build() | ||||||
| 		} | 		} | ||||||
| 		return res, nil |  | ||||||
|  |  | ||||||
| 	} else { | 	} else { | ||||||
|  | 		err = dec.Decode(&res) | ||||||
| 		var res TData |  | ||||||
|  |  | ||||||
| 		err := dec.Decode(&res) |  | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return *new(TData), exerr.Wrap(err, "failed to decode single entity").Type("target-type", res).Build() | 			return *new(TData), exerr.Wrap(err, "failed to decode single entity").Type("target-type", res).Build() | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		return res, nil |  | ||||||
|  |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	for _, hook := range c.unmarshalHooks { | ||||||
|  | 		res = hook(res) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return res, nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData, error) { | func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData, error) { | ||||||
|  |  | ||||||
|  | 	res := make([]TData, 0, cursor.RemainingBatchLength()) | ||||||
|  |  | ||||||
| 	if c.customDecoder != nil { | 	if c.customDecoder != nil { | ||||||
|  |  | ||||||
| 		res := make([]TData, 0, cursor.RemainingBatchLength()) |  | ||||||
|  |  | ||||||
| 		for cursor.Next(ctx) { | 		for cursor.Next(ctx) { | ||||||
| 			entry, err := (*c.customDecoder)(ctx, cursor) | 			entry, err := (*c.customDecoder)(ctx, cursor) | ||||||
| 			if err != nil { | 			if err != nil { | ||||||
| @@ -40,20 +42,48 @@ func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData | |||||||
| 			} | 			} | ||||||
| 			res = append(res, entry) | 			res = append(res, entry) | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		return res, nil |  | ||||||
|  |  | ||||||
| 	} else { | 	} else { | ||||||
|  |  | ||||||
| 		res := make([]TData, 0, cursor.RemainingBatchLength()) |  | ||||||
|  |  | ||||||
| 		err := cursor.All(ctx, &res) | 		err := cursor.All(ctx, &res) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, exerr.Wrap(err, "failed to batch-decode entity").Type("target-type", res).Build() | 			return nil, exerr.Wrap(err, "failed to batch-decode entity").Type("target-type", res).Build() | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		return res, nil |  | ||||||
|  |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	for i := 0; i < len(res); i++ { | ||||||
|  | 		for _, hook := range c.unmarshalHooks { | ||||||
|  | 			res[i] = hook(res[i]) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return res, nil | ||||||
|  |  | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) decodeSingleOrRequery(ctx context.Context, dec Decodable) (TData, error) { | ||||||
|  | 	if c.extraModPipeline == nil { | ||||||
|  |  | ||||||
|  | 		// simple case, we can just decode the result and return it | ||||||
|  | 		return c.decodeSingle(ctx, dec) | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		// annoying case, we have an extraModPipeline and need to re-query the document such that the extraModPipeline is applied... | ||||||
|  |  | ||||||
|  | 		type genDoc struct { | ||||||
|  | 			ID any `bson:"_id"` | ||||||
|  | 		} | ||||||
|  | 		var res genDoc | ||||||
|  | 		err := dec.Decode(&res) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return *new(TData), exerr.Wrap(err, "failed to ID-decode entity").Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		v, err := c.findOneInternal(ctx, bson.M{"_id": res.ID}, false) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return *new(TData), exerr.Wrap(err, "failed to re-query entity").Any("_id", res.ID).Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		return *v, nil | ||||||
|  |  | ||||||
|  | 	} | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,87 +0,0 @@ | |||||||
| package wmo |  | ||||||
|  |  | ||||||
| import ( |  | ||||||
| 	"go.mongodb.org/mongo-driver/bson" |  | ||||||
| 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| func CreatePagination[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) { |  | ||||||
|  |  | ||||||
| 	cond := bson.A{} |  | ||||||
| 	sort := bson.D{} |  | ||||||
|  |  | ||||||
| 	valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary) |  | ||||||
| 	if err != nil { |  | ||||||
| 		return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build() |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	if sortPrimary == ct.SortASC { |  | ||||||
| 		// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary |  | ||||||
| 		cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}}) |  | ||||||
| 		sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) |  | ||||||
| 	} else if sortPrimary == ct.SortDESC { |  | ||||||
| 		// We sort DESC on <field> - so we want all entries older ($lt) than the $primary |  | ||||||
| 		cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}}) |  | ||||||
| 		sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary { |  | ||||||
|  |  | ||||||
| 		valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary) |  | ||||||
| 		if err != nil { |  | ||||||
| 			return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build() |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		if *sortSecondary == ct.SortASC { |  | ||||||
|  |  | ||||||
| 			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer) |  | ||||||
| 			cond = append(cond, bson.M{"$and": bson.A{ |  | ||||||
| 				bson.M{fieldPrimary: valuePrimary}, |  | ||||||
| 				bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}}, |  | ||||||
| 			}}) |  | ||||||
|  |  | ||||||
| 			sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) |  | ||||||
|  |  | ||||||
| 		} else if *sortSecondary == ct.SortDESC { |  | ||||||
|  |  | ||||||
| 			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older) |  | ||||||
| 			cond = append(cond, bson.M{"$and": bson.A{ |  | ||||||
| 				bson.M{fieldPrimary: valuePrimary}, |  | ||||||
| 				bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}}, |  | ||||||
| 			}}) |  | ||||||
|  |  | ||||||
| 			sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) |  | ||||||
|  |  | ||||||
| 		} |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	pipeline := make([]bson.D, 0, 3) |  | ||||||
|  |  | ||||||
| 	if token.Mode == ct.CTMStart { |  | ||||||
|  |  | ||||||
| 		// no gt/lt condition |  | ||||||
|  |  | ||||||
| 	} else if token.Mode == ct.CTMNormal { |  | ||||||
|  |  | ||||||
| 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}}) |  | ||||||
|  |  | ||||||
| 	} else if token.Mode == ct.CTMEnd { |  | ||||||
|  |  | ||||||
| 		// false |  | ||||||
| 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}}) |  | ||||||
|  |  | ||||||
| 	} else { |  | ||||||
|  |  | ||||||
| 		return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build() |  | ||||||
|  |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}}) |  | ||||||
|  |  | ||||||
| 	if pageSize != nil { |  | ||||||
| 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}}) |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return pipeline, nil |  | ||||||
| } |  | ||||||
| @@ -5,9 +5,13 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| 	"go.mongodb.org/mongo-driver/mongo/options" | 	"go.mongodb.org/mongo-driver/mongo/options" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) { | func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) { | ||||||
|  |  | ||||||
|  | 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | ||||||
| @@ -22,6 +26,9 @@ func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, op | |||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) { | func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) { | ||||||
|  |  | ||||||
|  | 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | ||||||
| @@ -39,6 +46,9 @@ func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeli | |||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) { | func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) { | ||||||
|  |  | ||||||
|  | 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return *new(TData), exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | 		return *new(TData), exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build() | ||||||
|   | |||||||
| @@ -2,69 +2,22 @@ package wmo | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"context" | 	"context" | ||||||
| 	"errors" |  | ||||||
| 	"go.mongodb.org/mongo-driver/bson" | 	"go.mongodb.org/mongo-driver/bson" | ||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| 	"go.mongodb.org/mongo-driver/mongo/options" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) { | func (c *Coll[TData]) Find(ctx context.Context, filter bson.M) ([]TData, error) { | ||||||
| 	mongoRes := c.coll.FindOne(ctx, filter) |  | ||||||
| 	if err := mongoRes.Err(); err != nil { |  | ||||||
| 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed"). |  | ||||||
| 			Str("collection", c.Name()). |  | ||||||
| 			Any("filter", filter). |  | ||||||
| 			Build() |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) | 	pipeline := mongo.Pipeline{} | ||||||
| } | 	pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}}) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) { | 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||||
| 	mongoRes := c.coll.FindOne(ctx, filter) |  | ||||||
|  |  | ||||||
| 	res, err := c.decodeSingle(ctx, mongoRes) | 	cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||||
| 	if errors.Is(err, mongo.ErrNoDocuments) { |  | ||||||
| 		return nil, nil |  | ||||||
| 	} |  | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Any("filter", filter).Str("collection", c.Name()).Build() | 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return &res, nil |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) { |  | ||||||
| 	mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id}) |  | ||||||
| 	if err := mongoRes.Err(); err != nil { |  | ||||||
| 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one-by-id] failed"). |  | ||||||
| 			Str("collection", c.Name()). |  | ||||||
| 			Id("id", id). |  | ||||||
| 			Build() |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) { |  | ||||||
| 	mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id}) |  | ||||||
|  |  | ||||||
| 	res, err := c.decodeSingle(ctx, mongoRes) |  | ||||||
| 	if errors.Is(err, mongo.ErrNoDocuments) { |  | ||||||
| 		return nil, nil |  | ||||||
| 	} |  | ||||||
| 	if err != nil { |  | ||||||
| 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt-by-id] failed").Id("id", id).Str("collection", c.Name()).Build() |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return &res, nil |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) { |  | ||||||
| 	cursor, err := c.coll.Find(ctx, filter, opts...) |  | ||||||
| 	if err != nil { |  | ||||||
| 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Any("filter", filter).Any("opts", opts).Str("collection", c.Name()).Build() |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	res, err := c.decodeAll(ctx, cursor) | 	res, err := c.decodeAll(ctx, cursor) | ||||||
|   | |||||||
							
								
								
									
										93
									
								
								wmo/queryFindOne.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										93
									
								
								wmo/queryFindOne.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,93 @@ | |||||||
|  | package wmo | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"errors" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) { | ||||||
|  | 	r, err := c.findOneInternal(ctx, filter, false) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return *r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) { | ||||||
|  | 	r, err := c.findOneInternal(ctx, filter, true) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) { | ||||||
|  | 	r, err := c.findOneInternal(ctx, bson.M{"_id": id}, false) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one-by-id] failed").Id("id", id).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return *r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) { | ||||||
|  | 	r, err := c.findOneInternal(ctx, bson.M{"_id": id}, true) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt-by-id] failed").Id("id", id).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowNull bool) (*TData, error) { | ||||||
|  |  | ||||||
|  | 	if len(c.extraModPipeline) == 0 { | ||||||
|  |  | ||||||
|  | 		// simple case, use mongo FindOne | ||||||
|  |  | ||||||
|  | 		mongoRes := c.coll.FindOne(ctx, filter) | ||||||
|  |  | ||||||
|  | 		res, err := c.decodeSingle(ctx, mongoRes) | ||||||
|  | 		if allowNull && errors.Is(err, mongo.ErrNoDocuments) { | ||||||
|  | 			return nil, nil | ||||||
|  | 		} | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "mongo-query[find-one] failed").Any("filter", filter).Str("collection", c.Name()).Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		return &res, nil | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		// complex case, we have one or more additional pipeline stages, convert to aggregation | ||||||
|  |  | ||||||
|  | 		pipeline := mongo.Pipeline{} | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}}) | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: 1}}) | ||||||
|  |  | ||||||
|  | 		pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||||
|  |  | ||||||
|  | 		cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if cursor.Next(ctx) { | ||||||
|  | 			v, err := c.decodeSingle(ctx, cursor) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||||
|  | 			} | ||||||
|  | 			return &v, nil | ||||||
|  | 		} else if allowNull { | ||||||
|  | 			return nil, nil | ||||||
|  | 		} else { | ||||||
|  | 			return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | } | ||||||
| @@ -14,9 +14,25 @@ func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, erro | |||||||
| 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	mongoRes := c.coll.FindOne(ctx, bson.M{"_id": insRes.InsertedID}) | 	r, err := c.findOneInternal(ctx, bson.M{"_id": insRes.InsertedID}, false) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  | 	return *r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) | // InsertOneUnchecked behaves the same as InsertOne, but allows arbitrary data to be inserted (valueIn is any instead of TData) | ||||||
|  | func (c *Coll[TData]) InsertOneUnchecked(ctx context.Context, valueIn any) (TData, error) { | ||||||
|  | 	insRes, err := c.coll.InsertOne(ctx, valueIn) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	r, err := c.findOneInternal(ctx, bson.M{"_id": insRes.InsertedID}, false) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  | 	return *r, nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) { | func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) { | ||||||
|   | |||||||
| @@ -34,7 +34,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
| 		sortDirSecondary = nil | 		sortDirSecondary = nil | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	paginationPipeline, err := CreatePagination(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | 	paginationPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, ct.CursorToken{}, exerr. | 		return nil, ct.CursorToken{}, exerr. | ||||||
| 			Wrap(err, "failed to create pagination"). | 			Wrap(err, "failed to create pagination"). | ||||||
| @@ -50,6 +50,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	pipeline = append(pipeline, paginationPipeline...) | 	pipeline = append(pipeline, paginationPipeline...) | ||||||
|  | 	pipeline = append(pipeline, c.extraModPipeline...) | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline) | 	cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -91,11 +92,11 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
| 	return entities, nextToken, nil | 	return entities, nextToken, nil | ||||||
| } | } | ||||||
|  |  | ||||||
| type countRes struct { | func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, error) { | ||||||
| 	Count int64 `bson:"c"` | 	type countRes struct { | ||||||
| } | 		Count int64 `bson:"c"` | ||||||
|  | 	} | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Count(ctx context.Context, filter ct.Filter) (int64, error) { |  | ||||||
| 	pipeline := filter.FilterQuery() | 	pipeline := filter.FilterQuery() | ||||||
|  |  | ||||||
| 	pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}}) | 	pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}}) | ||||||
| @@ -130,3 +131,83 @@ func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageS | |||||||
| 	} | 	} | ||||||
| 	return data, token, count, nil | 	return data, token, count, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) { | ||||||
|  |  | ||||||
|  | 	cond := bson.A{} | ||||||
|  | 	sort := bson.D{} | ||||||
|  |  | ||||||
|  | 	valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if sortPrimary == ct.SortASC { | ||||||
|  | 		// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary | ||||||
|  | 		cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}}) | ||||||
|  | 		sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) | ||||||
|  | 	} else if sortPrimary == ct.SortDESC { | ||||||
|  | 		// We sort DESC on <field> - so we want all entries older ($lt) than the $primary | ||||||
|  | 		cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}}) | ||||||
|  | 		sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary { | ||||||
|  |  | ||||||
|  | 		valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if *sortSecondary == ct.SortASC { | ||||||
|  |  | ||||||
|  | 			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer) | ||||||
|  | 			cond = append(cond, bson.M{"$and": bson.A{ | ||||||
|  | 				bson.M{fieldPrimary: valuePrimary}, | ||||||
|  | 				bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}}, | ||||||
|  | 			}}) | ||||||
|  |  | ||||||
|  | 			sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) | ||||||
|  |  | ||||||
|  | 		} else if *sortSecondary == ct.SortDESC { | ||||||
|  |  | ||||||
|  | 			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older) | ||||||
|  | 			cond = append(cond, bson.M{"$and": bson.A{ | ||||||
|  | 				bson.M{fieldPrimary: valuePrimary}, | ||||||
|  | 				bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}}, | ||||||
|  | 			}}) | ||||||
|  |  | ||||||
|  | 			sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) | ||||||
|  |  | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipeline := make([]bson.D, 0, 3) | ||||||
|  |  | ||||||
|  | 	if token.Mode == ct.CTMStart { | ||||||
|  |  | ||||||
|  | 		// no gt/lt condition | ||||||
|  |  | ||||||
|  | 	} else if token.Mode == ct.CTMNormal { | ||||||
|  |  | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}}) | ||||||
|  |  | ||||||
|  | 	} else if token.Mode == ct.CTMEnd { | ||||||
|  |  | ||||||
|  | 		// false | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}}) | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build() | ||||||
|  |  | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}}) | ||||||
|  |  | ||||||
|  | 	if pageSize != nil { | ||||||
|  | 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}}) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return pipeline, nil | ||||||
|  | } | ||||||
|   | |||||||
							
								
								
									
										86
									
								
								wmo/queryPaginate.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										86
									
								
								wmo/queryPaginate.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,86 @@ | |||||||
|  | package wmo | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
|  | 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.Filter, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||||
|  | 	type totalCountResult struct { | ||||||
|  | 		Count int `bson:"count"` | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if page < 0 { | ||||||
|  | 		page = 1 | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelineSort := mongo.Pipeline{} | ||||||
|  | 	pipelineFilter := mongo.Pipeline{} | ||||||
|  | 	pf1 := "_id" | ||||||
|  | 	pd1 := ct.SortASC | ||||||
|  |  | ||||||
|  | 	if filter != nil { | ||||||
|  | 		pipelineFilter = filter.FilterQuery() | ||||||
|  | 		pf1, pd1 = filter.Pagination() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if pd1 == ct.SortASC { | ||||||
|  | 		pipelineSort = append(pipelineSort, bson.D{{Key: "$sort", Value: bson.D{{Key: pf1, Value: +1}}}}) | ||||||
|  | 	} else if pd1 == ct.SortDESC { | ||||||
|  | 		pipelineSort = append(pipelineSort, bson.D{{Key: "$sort", Value: bson.D{{Key: pf1, Value: -1}}}}) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelinePaginate := mongo.Pipeline{} | ||||||
|  | 	if limit != nil { | ||||||
|  | 		pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$skip", Value: *limit * (page - 1)}}) | ||||||
|  | 		pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$limit", Value: *limit}}) | ||||||
|  | 	} else { | ||||||
|  | 		page = 1 | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelineCount := mongo.Pipeline{} | ||||||
|  | 	pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$count", Value: "count"}}) | ||||||
|  |  | ||||||
|  | 	pipelineList := langext.ArrConcat(mongo.Pipeline{}, pipelineFilter, pipelinePaginate, c.extraModPipeline) | ||||||
|  | 	pipelineTotalCount := langext.ArrConcat(mongo.Pipeline{}, pipelineFilter, pipelineCount) | ||||||
|  |  | ||||||
|  | 	cursorList, err := c.coll.Aggregate(ctx, pipelineList) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	entities, err := c.decodeAll(ctx, cursorList) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, pag.Pagination{}, exerr.Wrap(err, "failed to all-decode entities").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	cursorTotalCount, err := c.coll.Aggregate(ctx, pipelineTotalCount) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	var tcRes totalCountResult | ||||||
|  | 	if cursorTotalCount.Next(ctx) { | ||||||
|  | 		err = cursorTotalCount.Decode(&tcRes) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode mongo-aggregation $count result").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build() | ||||||
|  | 		} | ||||||
|  | 	} else { | ||||||
|  | 		tcRes.Count = 0 // no entries in DB | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	paginationObj := pag.Pagination{ | ||||||
|  | 		Page:             page, | ||||||
|  | 		Limit:            langext.Coalesce(limit, tcRes.Count), | ||||||
|  | 		TotalPages:       pag.CalcPaginationTotalPages(tcRes.Count, langext.Coalesce(limit, tcRes.Count)), | ||||||
|  | 		TotalItems:       tcRes.Count, | ||||||
|  | 		CurrentPageCount: len(entities), | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return entities, paginationObj, nil | ||||||
|  | } | ||||||
| @@ -18,7 +18,7 @@ func (c *Coll[TData]) FindOneAndUpdate(ctx context.Context, filterQuery bson.M, | |||||||
| 			Build() | 			Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) | 	return c.decodeSingleOrRequery(ctx, mongoRes) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQuery bson.M) error { | func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQuery bson.M) error { | ||||||
| @@ -81,5 +81,5 @@ func (c *Coll[TData]) FindOneAndReplace(ctx context.Context, filterQuery bson.M, | |||||||
| 			Build() | 			Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return c.decodeSingle(ctx, mongoRes) | 	return c.decodeSingleOrRequery(ctx, mongoRes) | ||||||
| } | } | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user