Compare commits
	
		
			12 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| c338d23070 | |||
| 1fbae343a4 | |||
| 31418bf0e6 | |||
| 6d45f6f667 | |||
| f610a2202c | |||
| 2807299d46 | |||
| e872dbccec | |||
| 9daf71e2ed | |||
| fe278f7772 | |||
| 8ebda6fb3a | |||
| b0d3ce8c1c | |||
| 021465e524 | 
							
								
								
									
										4
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										4
									
								
								Makefile
									
									
									
									
									
								
							| @@ -3,7 +3,9 @@ run: | ||||
| 	echo "This is a library - can't be run" && false | ||||
|  | ||||
| test: | ||||
| 	go test ./... | ||||
| 	# go test ./... | ||||
| 	which gotestsum || go install gotest.tools/gotestsum@latest | ||||
| 	gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./test" | ||||
|  | ||||
| version: | ||||
| 	_data/version.sh | ||||
							
								
								
									
										32
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										32
									
								
								README.md
									
									
									
									
									
								
							| @@ -6,3 +6,35 @@ A collection of general & useful library methods | ||||
| This should not have any heavy dependencies (gin, mongo, etc) and add missing basic language features... | ||||
|  | ||||
| Potentially needs `export GOPRIVATE="gogs.mikescher.com"` | ||||
|  | ||||
|  | ||||
| ### Packages: | ||||
|  | ||||
| | Name        | Maintainer | Description                                                                                                   | | ||||
| |-------------|------------|---------------------------------------------------------------------------------------------------------------| | ||||
| | langext     | Mike       | General utility/helper functions (everything that's missing from the go standard library)                     | ||||
| | mathext     | Mike       | Utility/Helper functions for math                                                                             | | ||||
| | cryptext    | Mike       | Utility/Helper functions for encryption                                                                       | | ||||
| | syncext     | Mike       | Utility/Helper functions for multi-threading / mutex / channels                                               | ||||
| | dataext     | Mike       | Various useful data structures                                                                                | | ||||
| | zipext      | Mike       | Utility for zip/gzip/tar etc                                                                                  | | ||||
| |             |            |                                                                                                               | | ||||
| | mongoext    | Mike       | Utility/Helper functions for mongodb                                                                          | | ||||
| | cursortoken | Mike       | MongoDB cursortoken implementation                                                                            | | ||||
| |             |            |                                                                                                               | | ||||
| | totpext     | Mike       | Implementation of TOTP (2-Factor-Auth)                                                                        | | ||||
| | termext     | Mike       | Utilities for terminals (mostly color output)                                                                 | | ||||
| | confext     | Mike       | Parses environment configuration into structs                                                                 | | ||||
| | cmdext      | Mike       | Runner for external commands/processes                                                                        | | ||||
| |             |            |                                                                                                               | | ||||
| | sq          | Mike       | Utility functions for sql based databases                                                                     | | ||||
| | tst         | Mike       | Utility functions for unit tests                                                                              | | ||||
| |             |            |                                                                                                               | | ||||
| | rfctime     | Mike       | Classes for time serialization, with different marshalling methods for mongo and json                         | ||||
| | gojson      | Mike       | Same interface for marshalling/unmarshalling as go/json, except with proper serialization of null arrays/maps | | ||||
| |             |            |                                                                                                               | | ||||
| | bfcodegen   | Mike       | Various codegen tools (run via go generate)                                                                   | | ||||
| |             |            |                                                                                                               | | ||||
| | rext        | Mike       | Regex Wrapper, wraps regexp with a better interface                                                           | | ||||
| | wmo         | Mike       | Mongo Wrapper, wraps mongodb with a better interface                                                          | | ||||
| |             |            |                                                                                                               | | ||||
							
								
								
									
										13
									
								
								TODO.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								TODO.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,13 @@ | ||||
|  | ||||
|  | ||||
|  - cronext | ||||
|  | ||||
|  - cursortoken | ||||
|  | ||||
|  - typed/generic mongo wrapper | ||||
|  | ||||
|  - error package | ||||
|  | ||||
| - rfctime.DateOnly | ||||
| - rfctime.HMSTimeOnly | ||||
| - rfctime.NanoTimeOnly | ||||
							
								
								
									
										318
									
								
								bfcodegen/enum-generate.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										318
									
								
								bfcodegen/enum-generate.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,318 @@ | ||||
package bfcodegen

import (
	"errors"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"regexp"
	"strings"
	"time"

	"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"gogs.mikescher.com/BlackForestBytes/goext/rext"
)
|  | ||||
| type EnumDefVal struct { | ||||
| 	VarName     string | ||||
| 	Value       string | ||||
| 	Description *string | ||||
| } | ||||
|  | ||||
| type EnumDef struct { | ||||
| 	File         string | ||||
| 	EnumTypeName string | ||||
| 	Type         string | ||||
| 	Values       []EnumDefVal | ||||
| } | ||||
|  | ||||
| var rexPackage = rext.W(regexp.MustCompile("^package\\s+(?P<name>[A-Za-z0-9_]+)\\s*$")) | ||||
|  | ||||
| var rexEnumDef = rext.W(regexp.MustCompile("^\\s*type\\s+(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*//\\s*(@enum:type).*$")) | ||||
|  | ||||
| var rexValueDef = rext.W(regexp.MustCompile("^\\s*(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*=\\s*(?P<value>(\"[A-Za-z0-9_:]+\"|[0-9]+))\\s*(//(?P<descr>.*))?.*$")) | ||||
|  | ||||
| func GenerateEnumSpecs(sourceDir string, destFile string) error { | ||||
|  | ||||
| 	files, err := os.ReadDir(sourceDir) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	allEnums := make([]EnumDef, 0) | ||||
|  | ||||
| 	pkgname := "" | ||||
|  | ||||
| 	for _, f := range files { | ||||
| 		if !strings.HasSuffix(f.Name(), ".go") { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		fmt.Printf("========= %s =========\n\n", f.Name()) | ||||
| 		fileEnums, pn, err := processFile(f.Name()) | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 		} | ||||
|  | ||||
| 		fmt.Printf("\n") | ||||
|  | ||||
| 		allEnums = append(allEnums, fileEnums...) | ||||
|  | ||||
| 		if pn != "" { | ||||
| 			pkgname = pn | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if pkgname == "" { | ||||
| 		return errors.New("no package name found in any file") | ||||
| 	} | ||||
|  | ||||
| 	err = os.WriteFile(destFile, []byte(fmtOutput(allEnums, pkgname)), 0o755) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second)) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if res.CommandTimedOut { | ||||
| 		fmt.Println(res.StdCombined) | ||||
| 		return errors.New("go fmt timed out") | ||||
| 	} | ||||
| 	if res.ExitCode != 0 { | ||||
| 		fmt.Println(res.StdCombined) | ||||
| 		return errors.New("go fmt did not succeed") | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func processFile(fn string) ([]EnumDef, string, error) { | ||||
| 	file, err := os.Open(fn) | ||||
| 	if err != nil { | ||||
| 		return nil, "", err | ||||
| 	} | ||||
|  | ||||
| 	defer func() { _ = file.Close() }() | ||||
|  | ||||
| 	bin, err := io.ReadAll(file) | ||||
| 	if err != nil { | ||||
| 		return nil, "", err | ||||
| 	} | ||||
|  | ||||
| 	lines := strings.Split(string(bin), "\n") | ||||
|  | ||||
| 	enums := make([]EnumDef, 0) | ||||
|  | ||||
| 	pkgname := "" | ||||
|  | ||||
| 	for i, line := range lines { | ||||
| 		if i == 0 && strings.HasPrefix(line, "// Code generated by") { | ||||
| 			break | ||||
| 		} | ||||
|  | ||||
| 		if match, ok := rexPackage.MatchFirst(line); i == 0 && ok { | ||||
| 			pkgname = match.GroupByName("name").Value() | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		if match, ok := rexEnumDef.MatchFirst(line); ok { | ||||
| 			def := EnumDef{ | ||||
| 				File:         fn, | ||||
| 				EnumTypeName: match.GroupByName("name").Value(), | ||||
| 				Type:         match.GroupByName("type").Value(), | ||||
| 				Values:       make([]EnumDefVal, 0), | ||||
| 			} | ||||
| 			enums = append(enums, def) | ||||
| 			fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type) | ||||
| 		} | ||||
|  | ||||
| 		if match, ok := rexValueDef.MatchFirst(line); ok { | ||||
| 			typename := match.GroupByName("type").Value() | ||||
| 			def := EnumDefVal{ | ||||
| 				VarName:     match.GroupByName("name").Value(), | ||||
| 				Value:       match.GroupByName("value").Value(), | ||||
| 				Description: match.GroupByNameOrEmpty("descr").ValueOrNil(), | ||||
| 			} | ||||
|  | ||||
| 			found := false | ||||
| 			for i, v := range enums { | ||||
| 				if v.EnumTypeName == typename { | ||||
| 					enums[i].Values = append(enums[i].Values, def) | ||||
| 					found = true | ||||
| 					if def.Description != nil { | ||||
| 						fmt.Printf("Found enum value [%s] for '%s'  ('%s')\n", def.Value, def.VarName, *def.Description) | ||||
| 					} else { | ||||
| 						fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName) | ||||
| 					} | ||||
| 					break | ||||
| 				} | ||||
| 			} | ||||
| 			if !found { | ||||
| 				fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName) | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return enums, pkgname, nil | ||||
| } | ||||
|  | ||||
| func fmtOutput(enums []EnumDef, pkgname string) string { | ||||
| 	str := "// Code generated by permissions_gen.sh DO NOT EDIT.\n" | ||||
| 	str += "\n" | ||||
| 	str += "package " + pkgname + "\n" | ||||
| 	str += "\n" | ||||
|  | ||||
| 	str += "import \"gogs.mikescher.com/BlackForestBytes/goext/langext\"" + "\n" | ||||
| 	str += "\n" | ||||
|  | ||||
| 	str += "type Enum interface {" + "\n" | ||||
| 	str += "    Valid() bool" + "\n" | ||||
| 	str += "    ValuesAny() []any" + "\n" | ||||
| 	str += "    ValuesMeta() []EnumMetaValue" + "\n" | ||||
| 	str += "    VarName() string" + "\n" | ||||
| 	str += "}" + "\n" | ||||
| 	str += "" + "\n" | ||||
|  | ||||
| 	str += "type StringEnum interface {" + "\n" | ||||
| 	str += "    Enum" + "\n" | ||||
| 	str += "    String() string" + "\n" | ||||
| 	str += "}" + "\n" | ||||
| 	str += "" + "\n" | ||||
|  | ||||
| 	str += "type DescriptionEnum interface {" + "\n" | ||||
| 	str += "    Enum" + "\n" | ||||
| 	str += "    Description() string" + "\n" | ||||
| 	str += "}" + "\n" | ||||
| 	str += "\n" | ||||
|  | ||||
| 	str += "type EnumMetaValue struct {" + "\n" | ||||
| 	str += "    VarName     string  `json:\"varName\"`" + "\n" | ||||
| 	str += "    Value       any     `json:\"value\"`" + "\n" | ||||
| 	str += "    Description *string `json:\"description\"`" + "\n" | ||||
| 	str += "}" + "\n" | ||||
| 	str += "\n" | ||||
|  | ||||
| 	for _, enumdef := range enums { | ||||
|  | ||||
| 		hasDescr := langext.ArrAll(enumdef.Values, func(val EnumDefVal) bool { return val.Description != nil }) | ||||
| 		hasStr := enumdef.Type == "string" | ||||
|  | ||||
| 		str += "// ================================ " + enumdef.EnumTypeName + " ================================" + "\n" | ||||
| 		str += "//" + "\n" | ||||
| 		str += "// File:       " + enumdef.File + "\n" | ||||
| 		str += "// StringEnum: " + langext.Conditional(hasStr, "true", "false") + "\n" | ||||
| 		str += "// DescrEnum:  " + langext.Conditional(hasDescr, "true", "false") + "\n" | ||||
| 		str += "//" + "\n" | ||||
| 		str += "" + "\n" | ||||
|  | ||||
| 		str += "var __" + enumdef.EnumTypeName + "Values = []" + enumdef.EnumTypeName + "{" + "\n" | ||||
| 		for _, v := range enumdef.Values { | ||||
| 			str += "    " + v.VarName + "," + "\n" | ||||
| 		} | ||||
| 		str += "}" + "\n" | ||||
| 		str += "" + "\n" | ||||
|  | ||||
| 		if hasDescr { | ||||
| 			str += "var __" + enumdef.EnumTypeName + "Descriptions = map[" + enumdef.EnumTypeName + "]string{" + "\n" | ||||
| 			for _, v := range enumdef.Values { | ||||
| 				str += "    " + v.VarName + ": \"" + strings.TrimSpace(*v.Description) + "\"," + "\n" | ||||
| 			} | ||||
| 			str += "}" + "\n" | ||||
| 			str += "" + "\n" | ||||
| 		} | ||||
|  | ||||
| 		str += "var __" + enumdef.EnumTypeName + "Varnames = map[" + enumdef.EnumTypeName + "]string{" + "\n" | ||||
| 		for _, v := range enumdef.Values { | ||||
| 			str += "    " + v.VarName + ": \"" + v.VarName + "\"," + "\n" | ||||
| 		} | ||||
| 		str += "}" + "\n" | ||||
| 		str += "" + "\n" | ||||
|  | ||||
| 		str += "func (e " + enumdef.EnumTypeName + ") Valid() bool {" + "\n" | ||||
| 		str += "    return langext.InArray(e, __" + enumdef.EnumTypeName + "Values)" + "\n" | ||||
| 		str += "}" + "\n" | ||||
| 		str += "" + "\n" | ||||
|  | ||||
| 		str += "func (e " + enumdef.EnumTypeName + ") Values() []" + enumdef.EnumTypeName + " {" + "\n" | ||||
| 		str += "    return __" + enumdef.EnumTypeName + "Values" + "\n" | ||||
| 		str += "}" + "\n" | ||||
| 		str += "" + "\n" | ||||
|  | ||||
| 		str += "func (e " + enumdef.EnumTypeName + ") ValuesAny() []any {" + "\n" | ||||
| 		str += "    return langext.ArrCastToAny(__" + enumdef.EnumTypeName + "Values)" + "\n" | ||||
| 		str += "}" + "\n" | ||||
| 		str += "" + "\n" | ||||
|  | ||||
| 		str += "func (e " + enumdef.EnumTypeName + ") ValuesMeta() []EnumMetaValue {" + "\n" | ||||
| 		str += "    return []EnumMetaValue{" + "\n" | ||||
| 		for _, v := range enumdef.Values { | ||||
| 			if hasDescr { | ||||
| 				str += "        " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: langext.Ptr(\"%s\")},", v.VarName, v.VarName, strings.TrimSpace(*v.Description)) + "\n" | ||||
| 			} else { | ||||
| 				str += "        " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: nil},", v.VarName, v.VarName) + "\n" | ||||
| 			} | ||||
| 		} | ||||
| 		str += "    }" + "\n" | ||||
| 		str += "}" + "\n" | ||||
| 		str += "" + "\n" | ||||
|  | ||||
| 		if hasStr { | ||||
| 			str += "func (e " + enumdef.EnumTypeName + ") String() string {" + "\n" | ||||
| 			str += "    return string(e)" + "\n" | ||||
| 			str += "}" + "\n" | ||||
| 			str += "" + "\n" | ||||
| 		} | ||||
|  | ||||
| 		if hasDescr { | ||||
| 			str += "func (e " + enumdef.EnumTypeName + ") Description() string {" + "\n" | ||||
| 			str += "    if d, ok := __" + enumdef.EnumTypeName + "Descriptions[e]; ok {" + "\n" | ||||
| 			str += "        return d" + "\n" | ||||
| 			str += "    }" + "\n" | ||||
| 			str += "    return \"\"" + "\n" | ||||
| 			str += "}" + "\n" | ||||
| 			str += "" + "\n" | ||||
| 		} | ||||
|  | ||||
| 		str += "func (e " + enumdef.EnumTypeName + ") VarName() string {" + "\n" | ||||
| 		str += "    if d, ok := __" + enumdef.EnumTypeName + "Varnames[e]; ok {" + "\n" | ||||
| 		str += "        return d" + "\n" | ||||
| 		str += "    }" + "\n" | ||||
| 		str += "    return \"\"" + "\n" | ||||
| 		str += "}" + "\n" | ||||
| 		str += "" + "\n" | ||||
|  | ||||
| 		str += "func Parse" + enumdef.EnumTypeName + "(vv string) (" + enumdef.EnumTypeName + ", bool) {" + "\n" | ||||
| 		str += "    for _, ev := range __" + enumdef.EnumTypeName + "Values {" + "\n" | ||||
| 		str += "        if string(ev) == vv {" + "\n" | ||||
| 		str += "            return ev, true" + "\n" | ||||
| 		str += "        }" + "\n" | ||||
| 		str += "    }" + "\n" | ||||
| 		str += "    return \"\", false" + "\n" | ||||
| 		str += "}" + "\n" | ||||
| 		str += "" + "\n" | ||||
|  | ||||
| 		str += "func " + enumdef.EnumTypeName + "Values() []" + enumdef.EnumTypeName + " {" + "\n" | ||||
| 		str += "    return __" + enumdef.EnumTypeName + "Values" + "\n" | ||||
| 		str += "}" + "\n" | ||||
| 		str += "" + "\n" | ||||
|  | ||||
| 		str += "func " + enumdef.EnumTypeName + "ValuesMeta() []EnumMetaValue {" + "\n" | ||||
| 		str += "    return []EnumMetaValue{" + "\n" | ||||
| 		for _, v := range enumdef.Values { | ||||
| 			if hasDescr { | ||||
| 				str += "        " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: langext.Ptr(\"%s\")},", v.VarName, v.VarName, strings.TrimSpace(*v.Description)) + "\n" | ||||
| 			} else { | ||||
| 				str += "        " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: nil},", v.VarName, v.VarName) + "\n" | ||||
| 			} | ||||
| 		} | ||||
| 		str += "    }" + "\n" | ||||
| 		str += "}" + "\n" | ||||
| 		str += "" + "\n" | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	return str | ||||
| } | ||||
| @@ -8,6 +8,7 @@ import ( | ||||
| 	"os" | ||||
| 	"reflect" | ||||
| 	"strconv" | ||||
| 	"strings" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| @@ -172,6 +173,20 @@ func parseEnvToValue(envval string, fullEnvKey string, rvtype reflect.Type) (ref | ||||
|  | ||||
| 		return envcvl, nil | ||||
|  | ||||
| 	} else if rvtype.ConvertibleTo(reflect.TypeOf(false)) { | ||||
|  | ||||
| 		if strings.TrimSpace(strings.ToLower(envval)) == "true" { | ||||
| 			return reflect.ValueOf(true).Convert(rvtype), nil | ||||
| 		} else if strings.TrimSpace(strings.ToLower(envval)) == "false" { | ||||
| 			return reflect.ValueOf(false).Convert(rvtype), nil | ||||
| 		} else if strings.TrimSpace(strings.ToLower(envval)) == "1" { | ||||
| 			return reflect.ValueOf(true).Convert(rvtype), nil | ||||
| 		} else if strings.TrimSpace(strings.ToLower(envval)) == "0" { | ||||
| 			return reflect.ValueOf(false).Convert(rvtype), nil | ||||
| 		} else { | ||||
| 			return reflect.Value{}, errors.New(fmt.Sprintf("Failed to parse env-config variable '%s' to <%s, bool> (value := '%s')", rvtype.Name(), fullEnvKey, envval)) | ||||
| 		} | ||||
|  | ||||
| 	} else if rvtype.ConvertibleTo(reflect.TypeOf("")) { | ||||
|  | ||||
| 		envcvl := reflect.ValueOf(envval).Convert(rvtype) | ||||
|   | ||||
| @@ -68,6 +68,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) { | ||||
| 		V7 aliasstring   `env:"TEST_V7"` | ||||
| 		V8 time.Duration `env:"TEST_V8"` | ||||
| 		V9 time.Time     `env:"TEST_V9"` | ||||
| 		VA bool          `env:"TEST_VA"` | ||||
| 	} | ||||
|  | ||||
| 	data := testdata{ | ||||
| @@ -82,6 +83,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) { | ||||
| 		V7: "7", | ||||
| 		V8: 9, | ||||
| 		V9: time.Unix(1671102873, 0), | ||||
| 		VA: false, | ||||
| 	} | ||||
|  | ||||
| 	t.Setenv("TEST_V1", "846") | ||||
| @@ -93,6 +95,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) { | ||||
| 	t.Setenv("TEST_V7", "AAAAAA") | ||||
| 	t.Setenv("TEST_V8", "1min4s") | ||||
| 	t.Setenv("TEST_V9", "2009-11-10T23:00:00Z") | ||||
| 	t.Setenv("TEST_VA", "true") | ||||
|  | ||||
| 	err := ApplyEnvOverrides("", &data, ".") | ||||
| 	if err != nil { | ||||
| @@ -109,6 +112,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) { | ||||
| 	tst.AssertEqual(t, data.V7, "AAAAAA") | ||||
| 	tst.AssertEqual(t, data.V8, time.Second*64) | ||||
| 	tst.AssertEqual(t, data.V9, time.Unix(1257894000, 0).UTC()) | ||||
| 	tst.AssertEqual(t, data.VA, true) | ||||
| } | ||||
|  | ||||
| func TestApplyEnvOverridesRecursive(t *testing.T) { | ||||
|   | ||||
							
								
								
									
										8
									
								
								cursortoken/direction.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										8
									
								
								cursortoken/direction.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,8 @@ | ||||
| package cursortoken | ||||
|  | ||||
| type SortDirection string //@enum:type | ||||
|  | ||||
| const ( | ||||
| 	SortASC  SortDirection = "ASC" | ||||
| 	SortDESC SortDirection = "DESC" | ||||
| ) | ||||
							
								
								
									
										10
									
								
								cursortoken/filter.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										10
									
								
								cursortoken/filter.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,10 @@ | ||||
| package cursortoken | ||||
|  | ||||
| import ( | ||||
| 	"go.mongodb.org/mongo-driver/mongo" | ||||
| ) | ||||
|  | ||||
| type Filter interface { | ||||
| 	FilterQuery() mongo.Pipeline | ||||
| 	Pagination() (string, SortDirection, *string, *SortDirection) | ||||
| } | ||||
							
								
								
									
										184
									
								
								cursortoken/token.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										184
									
								
								cursortoken/token.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,184 @@ | ||||
| package cursortoken | ||||
|  | ||||
| import ( | ||||
| 	"encoding/base32" | ||||
| 	"encoding/json" | ||||
| 	"errors" | ||||
| 	"go.mongodb.org/mongo-driver/bson/primitive" | ||||
| 	"strings" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| type Mode string | ||||
|  | ||||
| const ( | ||||
| 	CTMStart  Mode = "START" | ||||
| 	CTMNormal Mode = "NORMAL" | ||||
| 	CTMEnd    Mode = "END" | ||||
| ) | ||||
|  | ||||
| type Extra struct { | ||||
| 	Timestamp *time.Time | ||||
| 	Id        *string | ||||
| 	Page      *int | ||||
| 	PageSize  *int | ||||
| } | ||||
|  | ||||
| type CursorToken struct { | ||||
| 	Mode               Mode | ||||
| 	ValuePrimary       string | ||||
| 	ValueSecondary     string | ||||
| 	Direction          SortDirection | ||||
| 	DirectionSecondary SortDirection | ||||
| 	PageSize           int | ||||
| 	Extra              Extra | ||||
| } | ||||
|  | ||||
| type cursorTokenSerialize struct { | ||||
| 	ValuePrimary       *string        `json:"v1,omitempty"` | ||||
| 	ValueSecondary     *string        `json:"v2,omitempty"` | ||||
| 	Direction          *SortDirection `json:"dir,omitempty"` | ||||
| 	DirectionSecondary *SortDirection `json:"dir2,omitempty"` | ||||
| 	PageSize           *int           `json:"size,omitempty"` | ||||
|  | ||||
| 	ExtraTimestamp *time.Time `json:"ts,omitempty"` | ||||
| 	ExtraId        *string    `json:"id,omitempty"` | ||||
| 	ExtraPage      *int       `json:"pg,omitempty"` | ||||
| 	ExtraPageSize  *int       `json:"sz,omitempty"` | ||||
| } | ||||
|  | ||||
| func Start() CursorToken { | ||||
| 	return CursorToken{ | ||||
| 		Mode:               CTMStart, | ||||
| 		ValuePrimary:       "", | ||||
| 		ValueSecondary:     "", | ||||
| 		Direction:          "", | ||||
| 		DirectionSecondary: "", | ||||
| 		PageSize:           0, | ||||
| 		Extra:              Extra{}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func End() CursorToken { | ||||
| 	return CursorToken{ | ||||
| 		Mode:               CTMEnd, | ||||
| 		ValuePrimary:       "", | ||||
| 		ValueSecondary:     "", | ||||
| 		Direction:          "", | ||||
| 		DirectionSecondary: "", | ||||
| 		PageSize:           0, | ||||
| 		Extra:              Extra{}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (c *CursorToken) Token() string { | ||||
| 	if c.Mode == CTMStart { | ||||
| 		return "@start" | ||||
| 	} | ||||
| 	if c.Mode == CTMEnd { | ||||
| 		return "@end" | ||||
| 	} | ||||
|  | ||||
| 	// We kinda manually implement omitempty for the CursorToken here | ||||
| 	// because omitempty does not work for time.Time and otherwise we would always | ||||
| 	// get weird time values when decoding a token that initially didn't have an Timestamp set | ||||
| 	// For this usecase we treat Unix=0 as an empty timestamp | ||||
|  | ||||
| 	sertok := cursorTokenSerialize{} | ||||
|  | ||||
| 	if c.ValuePrimary != "" { | ||||
| 		sertok.ValuePrimary = &c.ValuePrimary | ||||
| 	} | ||||
| 	if c.ValueSecondary != "" { | ||||
| 		sertok.ValueSecondary = &c.ValueSecondary | ||||
| 	} | ||||
| 	if c.Direction != "" { | ||||
| 		sertok.Direction = &c.Direction | ||||
| 	} | ||||
| 	if c.DirectionSecondary != "" { | ||||
| 		sertok.DirectionSecondary = &c.DirectionSecondary | ||||
| 	} | ||||
| 	if c.PageSize != 0 { | ||||
| 		sertok.PageSize = &c.PageSize | ||||
| 	} | ||||
|  | ||||
| 	sertok.ExtraTimestamp = c.Extra.Timestamp | ||||
| 	sertok.ExtraId = c.Extra.Id | ||||
| 	sertok.ExtraPage = c.Extra.Page | ||||
| 	sertok.ExtraPageSize = c.Extra.PageSize | ||||
|  | ||||
| 	body, err := json.Marshal(sertok) | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
|  | ||||
| 	return "tok_" + base32.StdEncoding.EncodeToString(body) | ||||
| } | ||||
|  | ||||
| func Decode(tok string) (CursorToken, error) { | ||||
| 	if tok == "" { | ||||
| 		return Start(), nil | ||||
| 	} | ||||
| 	if strings.ToLower(tok) == "@start" { | ||||
| 		return Start(), nil | ||||
| 	} | ||||
| 	if strings.ToLower(tok) == "@end" { | ||||
| 		return End(), nil | ||||
| 	} | ||||
|  | ||||
| 	if !strings.HasPrefix(tok, "tok_") { | ||||
| 		return CursorToken{}, errors.New("could not decode token, missing prefix") | ||||
| 	} | ||||
|  | ||||
| 	body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):]) | ||||
| 	if err != nil { | ||||
| 		return CursorToken{}, err | ||||
| 	} | ||||
|  | ||||
| 	var tokenDeserialize cursorTokenSerialize | ||||
| 	err = json.Unmarshal(body, &tokenDeserialize) | ||||
| 	if err != nil { | ||||
| 		return CursorToken{}, err | ||||
| 	} | ||||
|  | ||||
| 	token := CursorToken{Mode: CTMNormal} | ||||
|  | ||||
| 	if tokenDeserialize.ValuePrimary != nil { | ||||
| 		token.ValuePrimary = *tokenDeserialize.ValuePrimary | ||||
| 	} | ||||
| 	if tokenDeserialize.ValueSecondary != nil { | ||||
| 		token.ValueSecondary = *tokenDeserialize.ValueSecondary | ||||
| 	} | ||||
| 	if tokenDeserialize.Direction != nil { | ||||
| 		token.Direction = *tokenDeserialize.Direction | ||||
| 	} | ||||
| 	if tokenDeserialize.DirectionSecondary != nil { | ||||
| 		token.DirectionSecondary = *tokenDeserialize.DirectionSecondary | ||||
| 	} | ||||
| 	if tokenDeserialize.PageSize != nil { | ||||
| 		token.PageSize = *tokenDeserialize.PageSize | ||||
| 	} | ||||
|  | ||||
| 	token.Extra.Timestamp = tokenDeserialize.ExtraTimestamp | ||||
| 	token.Extra.Id = tokenDeserialize.ExtraId | ||||
| 	token.Extra.Page = tokenDeserialize.ExtraPage | ||||
| 	token.Extra.PageSize = tokenDeserialize.ExtraPageSize | ||||
|  | ||||
| 	return token, nil | ||||
| } | ||||
|  | ||||
| func (c *CursorToken) ValuePrimaryObjectId() (primitive.ObjectID, bool) { | ||||
| 	if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil { | ||||
| 		return oid, true | ||||
| 	} else { | ||||
| 		return primitive.ObjectID{}, false | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (c *CursorToken) ValueSecondaryObjectId() (primitive.ObjectID, bool) { | ||||
| 	if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil { | ||||
| 		return oid, true | ||||
| 	} else { | ||||
| 		return primitive.ObjectID{}, false | ||||
| 	} | ||||
| } | ||||
							
								
								
									
										16
									
								
								go.mod
									
									
									
									
									
								
							
							
						
						
									
										16
									
								
								go.mod
									
									
									
									
									
								
							| @@ -3,12 +3,22 @@ module gogs.mikescher.com/BlackForestBytes/goext | ||||
| go 1.19 | ||||
|  | ||||
| require ( | ||||
| 	github.com/jmoiron/sqlx v1.3.5 | ||||
| 	go.mongodb.org/mongo-driver v1.11.1 | ||||
| 	golang.org/x/crypto v0.4.0 | ||||
| 	golang.org/x/sys v0.3.0 | ||||
| 	golang.org/x/term v0.3.0 | ||||
| ) | ||||
|  | ||||
| require ( | ||||
| 	github.com/jmoiron/sqlx v1.3.5 // indirect | ||||
| 	go.mongodb.org/mongo-driver v1.11.1 // indirect | ||||
| 	golang.org/x/crypto v0.4.0 // indirect | ||||
| 	github.com/golang/snappy v0.0.1 // indirect | ||||
| 	github.com/klauspost/compress v1.13.6 // indirect | ||||
| 	github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe // indirect | ||||
| 	github.com/pkg/errors v0.9.1 // indirect | ||||
| 	github.com/xdg-go/pbkdf2 v1.0.0 // indirect | ||||
| 	github.com/xdg-go/scram v1.1.1 // indirect | ||||
| 	github.com/xdg-go/stringprep v1.0.3 // indirect | ||||
| 	github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect | ||||
| 	golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect | ||||
| 	golang.org/x/text v0.5.0 // indirect | ||||
| ) | ||||
|   | ||||
							
								
								
									
										27
									
								
								go.sum
									
									
									
									
									
								
							
							
						
						
									
										27
									
								
								go.sum
									
									
									
									
									
								
							| @@ -1,25 +1,43 @@ | ||||
| github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | ||||
| github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= | ||||
| github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | ||||
| github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | ||||
| github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | ||||
| github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4= | ||||
| github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | ||||
| github.com/google/go-cmp v0.5.2 h1:X2ev0eStA3AbceY54o37/0PQ/UWqKEiiO2dKL5OPaFM= | ||||
| github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= | ||||
| github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= | ||||
| github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= | ||||
| github.com/klauspost/compress v1.13.6 h1:P76CopJELS0TiO2mebmnzgWaajssP/EszplttgQxcgc= | ||||
| github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= | ||||
| github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= | ||||
| github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= | ||||
| github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= | ||||
| github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= | ||||
| github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= | ||||
| github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0= | ||||
| github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= | ||||
| github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg= | ||||
| github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= | ||||
| github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe h1:iruDEfMl2E6fbMZ9s0scYfZQ84/6SPL6zC8ACM2oIL0= | ||||
| github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= | ||||
| github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= | ||||
| github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= | ||||
| github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= | ||||
| github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= | ||||
| github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= | ||||
| github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= | ||||
| github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= | ||||
| github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4= | ||||
| github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= | ||||
| github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= | ||||
| github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= | ||||
| github.com/xdg-go/scram v1.1.1 h1:VOMT+81stJgXW3CpHyqHN3AXDYIMsx56mEFrB37Mb/E= | ||||
| github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= | ||||
| github.com/xdg-go/stringprep v1.0.3 h1:kdwGpVNwPFtjs98xCGkHjQtGKh86rDcRZN17QEMCOIs= | ||||
| github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= | ||||
| github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= | ||||
| github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= | ||||
| go.mongodb.org/mongo-driver v1.11.1 h1:QP0znIRTuL0jf1oBQoAoM0C6ZJfBK4kx0Uumtv1A7w8= | ||||
| go.mongodb.org/mongo-driver v1.11.1/go.mod h1:s7p5vEtfbeR1gYi6pnj3c3/urpbLv2T5Sfd6Rp2HBB8= | ||||
| @@ -27,24 +45,25 @@ golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0 | ||||
| golang.org/x/crypto v0.4.0 h1:UVQgzMY87xqpKNgb+kDsll2Igd33HszWHFLmpaRMq/8= | ||||
| golang.org/x/crypto v0.4.0/go.mod h1:3quD/ATkf6oY+rnes5c3ExXTbLc8mueNue5/DoinL80= | ||||
| golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= | ||||
| golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= | ||||
| golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||
| golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||
| golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||
| golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||
| golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U= | ||||
| golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||
| golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ= | ||||
| golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||
| golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | ||||
| golang.org/x/term v0.1.0 h1:g6Z6vPFA9dYBAF7DWcH6sCcOntplXsDKcliusYijMlw= | ||||
| golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | ||||
| golang.org/x/term v0.3.0 h1:qoo4akIqOcDME5bhc/NgxUdovd6BSS2uMsVjB56q1xI= | ||||
| golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= | ||||
| golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||
| golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= | ||||
| golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM= | ||||
| golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= | ||||
| golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= | ||||
| golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= | ||||
| golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | ||||
| gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | ||||
| gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | ||||
| gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | ||||
| gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= | ||||
| gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | ||||
|   | ||||
| @@ -31,16 +31,16 @@ func CompareIntArr(arr1 []int, arr2 []int) bool { | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| func CompareArr[T OrderedConstraint](arr1 []T, arr2 []T) bool { | ||||
| func CompareArr[T OrderedConstraint](arr1 []T, arr2 []T) int { | ||||
|  | ||||
| 	for i := 0; i < len(arr1) || i < len(arr2); i++ { | ||||
|  | ||||
| 		if i < len(arr1) && i < len(arr2) { | ||||
|  | ||||
| 			if arr1[i] < arr2[i] { | ||||
| 				return true | ||||
| 				return -1 | ||||
| 			} else if arr1[i] > arr2[i] { | ||||
| 				return false | ||||
| 				return +2 | ||||
| 			} else { | ||||
| 				continue | ||||
| 			} | ||||
| @@ -49,17 +49,17 @@ func CompareArr[T OrderedConstraint](arr1 []T, arr2 []T) bool { | ||||
|  | ||||
| 		if i < len(arr1) { | ||||
|  | ||||
| 			return true | ||||
| 			return +1 | ||||
|  | ||||
| 		} else { // if i < len(arr2) | ||||
|  | ||||
| 			return false | ||||
| 			return -1 | ||||
|  | ||||
| 		} | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	return false | ||||
| 	return 0 | ||||
| } | ||||
|  | ||||
| func CompareString(a, b string) int { | ||||
|   | ||||
| @@ -1,5 +1,10 @@ | ||||
| package langext | ||||
|  | ||||
| type MapEntry[T comparable, V any] struct { | ||||
| 	Key   T | ||||
| 	Value V | ||||
| } | ||||
|  | ||||
| func MapKeyArr[T comparable, V any](v map[T]V) []T { | ||||
| 	result := make([]T, 0, len(v)) | ||||
| 	for k := range v { | ||||
| @@ -8,6 +13,14 @@ func MapKeyArr[T comparable, V any](v map[T]V) []T { | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| func MapValueArr[T comparable, V any](v map[T]V) []V { | ||||
| 	result := make([]V, 0, len(v)) | ||||
| 	for _, mv := range v { | ||||
| 		result = append(result, mv) | ||||
| 	} | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| func ArrToMap[T comparable, V any](a []V, keyfunc func(V) T) map[T]V { | ||||
| 	result := make(map[T]V, len(a)) | ||||
| 	for _, v := range a { | ||||
| @@ -16,6 +29,17 @@ func ArrToMap[T comparable, V any](a []V, keyfunc func(V) T) map[T]V { | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| func MapToArr[T comparable, V any](v map[T]V) []MapEntry[T, V] { | ||||
| 	result := make([]MapEntry[T, V], 0, len(v)) | ||||
| 	for mk, mv := range v { | ||||
| 		result = append(result, MapEntry[T, V]{ | ||||
| 			Key:   mk, | ||||
| 			Value: mv, | ||||
| 		}) | ||||
| 	} | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| func CopyMap[K comparable, V any](a map[K]V) map[K]V { | ||||
| 	result := make(map[K]V, len(a)) | ||||
| 	for k, v := range a { | ||||
| @@ -23,3 +47,11 @@ func CopyMap[K comparable, V any](a map[K]V) map[K]V { | ||||
| 	} | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| func ForceMap[K comparable, V any](v map[K]V) map[K]V { | ||||
| 	if v == nil { | ||||
| 		return make(map[K]V, 0) | ||||
| 	} else { | ||||
| 		return v | ||||
| 	} | ||||
| } | ||||
|   | ||||
| @@ -4,6 +4,12 @@ import ( | ||||
| 	"reflect" | ||||
| ) | ||||
|  | ||||
| // PTrue := &true | ||||
| var PTrue = Ptr(true) | ||||
|  | ||||
| // PFalse := &false | ||||
| var PFalse = Ptr(false) | ||||
|  | ||||
| func Ptr[T any](v T) *T { | ||||
| 	return &v | ||||
| } | ||||
|   | ||||
| @@ -4,7 +4,7 @@ import ( | ||||
| 	"reflect" | ||||
| ) | ||||
|  | ||||
| var reflectBasicTypes = []reflect.Type{ | ||||
| var reflectBasicTypes = map[reflect.Kind]reflect.Type{ | ||||
| 	reflect.Bool:       reflect.TypeOf(false), | ||||
| 	reflect.Int:        reflect.TypeOf(int(0)), | ||||
| 	reflect.Int8:       reflect.TypeOf(int8(0)), | ||||
| @@ -109,3 +109,28 @@ func TryCast[T any](v any) (T, bool) { | ||||
|  | ||||
| 	return r4, true | ||||
| } | ||||
|  | ||||
| func TryCastType(v any, dest reflect.Type) (any, bool) { | ||||
|  | ||||
| 	underlying := Underlying(reflect.TypeOf(v)) | ||||
|  | ||||
| 	if underlying != Underlying(dest) { | ||||
| 		return nil, false | ||||
| 	} | ||||
|  | ||||
| 	r1 := reflect.ValueOf(v) | ||||
|  | ||||
| 	if !r1.CanConvert(underlying) { | ||||
| 		return nil, false | ||||
| 	} | ||||
|  | ||||
| 	r2 := r1.Convert(underlying) | ||||
|  | ||||
| 	if !r2.CanConvert(dest) { | ||||
| 		return nil, false | ||||
| 	} | ||||
|  | ||||
| 	r4 := r2.Convert(dest) | ||||
|  | ||||
| 	return r4.Interface(), true | ||||
| } | ||||
|   | ||||
| @@ -41,6 +41,14 @@ func NewHexUUID() (string, error) { | ||||
| 	return string(dst), nil | ||||
| } | ||||
|  | ||||
| func MustHexUUID() string { | ||||
| 	v, err := NewHexUUID() | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
| 	return v | ||||
| } | ||||
|  | ||||
| func NewUpperHexUUID() (string, error) { | ||||
| 	uuid, err := NewUUID() | ||||
| 	if err != nil { | ||||
| @@ -64,6 +72,14 @@ func NewUpperHexUUID() (string, error) { | ||||
| 	return strings.ToUpper(string(dst)), nil | ||||
| } | ||||
|  | ||||
| func MustUpperHexUUID() string { | ||||
| 	v, err := NewUpperHexUUID() | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
| 	return v | ||||
| } | ||||
|  | ||||
| func NewRawHexUUID() (string, error) { | ||||
| 	uuid, err := NewUUID() | ||||
| 	if err != nil { | ||||
| @@ -83,6 +99,14 @@ func NewRawHexUUID() (string, error) { | ||||
| 	return strings.ToUpper(string(dst)), nil | ||||
| } | ||||
|  | ||||
| func MustRawHexUUID() string { | ||||
| 	v, err := NewRawHexUUID() | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
| 	return v | ||||
| } | ||||
|  | ||||
| func NewBracesUUID() (string, error) { | ||||
| 	uuid, err := NewUUID() | ||||
| 	if err != nil { | ||||
| @@ -108,6 +132,14 @@ func NewBracesUUID() (string, error) { | ||||
| 	return strings.ToUpper(string(dst)), nil | ||||
| } | ||||
|  | ||||
| func MustBracesUUID() string { | ||||
| 	v, err := NewBracesUUID() | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
| 	return v | ||||
| } | ||||
|  | ||||
| func NewParensUUID() (string, error) { | ||||
| 	uuid, err := NewUUID() | ||||
| 	if err != nil { | ||||
| @@ -132,3 +164,11 @@ func NewParensUUID() (string, error) { | ||||
|  | ||||
| 	return strings.ToUpper(string(dst)), nil | ||||
| } | ||||
|  | ||||
| func MustParensUUID() string { | ||||
| 	v, err := NewParensUUID() | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
| 	return v | ||||
| } | ||||
|   | ||||
| @@ -1,6 +1,7 @@ | ||||
| package rfctime | ||||
|  | ||||
| import ( | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||
| 	"testing" | ||||
| 	"time" | ||||
| ) | ||||
|   | ||||
| @@ -81,7 +81,7 @@ func (t *RFC3339Time) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||
| 		return errors.New(fmt.Sprintf("cannot unmarshal %v into RFC3339Time", bt)) | ||||
| 	} | ||||
| 	var tt time.Time | ||||
| 	err := bson.Unmarshal(data, &tt) | ||||
| 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| @@ -116,6 +116,12 @@ func (t RFC3339Time) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueRead | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if val.Kind() == reflect.Ptr { | ||||
| 		val.Set(reflect.ValueOf(&t)) | ||||
| 	} else { | ||||
| 		val.Set(reflect.ValueOf(t)) | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
|   | ||||
							
								
								
									
										91
									
								
								sq/converter.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										91
									
								
								sq/converter.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,91 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"errors" | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| //TODO UNFINISHED | ||||
| // this is not finished | ||||
| // idea was that we can register converter in the database struct | ||||
| // they get inherited from the transactions | ||||
| // and when marshallingunmarshaling (sq.Query | sq.QueryAll) | ||||
| // or marshaling (sq.InsertSingle) | ||||
| // the types get converter automatically... | ||||
|  | ||||
| type DBTypeConverter interface { | ||||
| 	ModelTypeString() string | ||||
| 	DBTypeString() string | ||||
| 	ModelToDB(v any) (any, error) | ||||
| 	DBToModel(v any) (any, error) | ||||
| } | ||||
|  | ||||
| var ConverterBoolToBit = NewDBTypeConverter[bool, int](func(v bool) (int, error) { | ||||
| 	return langext.Conditional(v, 1, 0), nil | ||||
| }, func(v int) (bool, error) { | ||||
| 	if v == 0 { | ||||
| 		return false, nil | ||||
| 	} | ||||
| 	if v == 1 { | ||||
| 		return true, nil | ||||
| 	} | ||||
| 	return false, errors.New(fmt.Sprintf("invalid valud for boolean: '%d'", v)) | ||||
| }) | ||||
|  | ||||
| var ConverterTimeToUnixMillis = NewDBTypeConverter[time.Time, int64](func(v time.Time) (int64, error) { | ||||
| 	return v.UnixMilli(), nil | ||||
| }, func(v int64) (time.Time, error) { | ||||
| 	return time.UnixMilli(v), nil | ||||
| }) | ||||
|  | ||||
| var ConverterOptTimeToUnixMillis = NewDBTypeConverter[*time.Time, *int64](func(v *time.Time) (*int64, error) { | ||||
| 	if v == nil { | ||||
| 		return nil, nil | ||||
| 	} | ||||
| 	return langext.Ptr(v.UnixMilli()), nil | ||||
| }, func(v *int64) (*time.Time, error) { | ||||
| 	if v == nil { | ||||
| 		return nil, nil | ||||
| 	} | ||||
| 	return langext.Ptr(time.UnixMilli(*v)), nil | ||||
| }) | ||||
|  | ||||
| type dbTypeConverterImpl[TModelData any, TDBData any] struct { | ||||
| 	dbTypeString    string | ||||
| 	modelTypeString string | ||||
| 	todb            func(v TModelData) (TDBData, error) | ||||
| 	tomodel         func(v TDBData) (TModelData, error) | ||||
| } | ||||
|  | ||||
| func (t *dbTypeConverterImpl[TModelData, TDBData]) ModelTypeString() string { | ||||
| 	return t.modelTypeString | ||||
| } | ||||
|  | ||||
| func (t *dbTypeConverterImpl[TModelData, TDBData]) DBTypeString() string { | ||||
| 	return t.dbTypeString | ||||
| } | ||||
|  | ||||
| func (t *dbTypeConverterImpl[TModelData, TDBData]) ModelToDB(v any) (any, error) { | ||||
| 	if vv, ok := v.(TModelData); ok { | ||||
| 		return t.todb(vv) | ||||
| 	} | ||||
| 	return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.modelTypeString, v)) | ||||
| } | ||||
|  | ||||
| func (t *dbTypeConverterImpl[TModelData, TDBData]) DBToModel(v any) (any, error) { | ||||
| 	if vv, ok := v.(TDBData); ok { | ||||
| 		return t.tomodel(vv) | ||||
| 	} | ||||
| 	return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.dbTypeString, v)) | ||||
| } | ||||
|  | ||||
| func NewDBTypeConverter[TModelData any, TDBData any](todb func(v TModelData) (TDBData, error), tomodel func(v TDBData) (TModelData, error)) DBTypeConverter { | ||||
| 	return &dbTypeConverterImpl[TModelData, TDBData]{ | ||||
| 		dbTypeString:    fmt.Sprintf("%T", *new(TDBData)), | ||||
| 		modelTypeString: fmt.Sprintf("%T", *new(TModelData)), | ||||
| 		todb:            todb, | ||||
| 		tomodel:         tomodel, | ||||
| 	} | ||||
| } | ||||
							
								
								
									
										199
									
								
								sq/hasher.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										199
									
								
								sq/hasher.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,199 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"crypto/sha256" | ||||
| 	"encoding/hex" | ||||
| 	"encoding/json" | ||||
| 	"fmt" | ||||
| 	"github.com/jmoiron/sqlx" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"os" | ||||
| 	"path/filepath" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| func HashSqliteSchema(ctx context.Context, schemaStr string) (string, error) { | ||||
| 	dbdir := os.TempDir() | ||||
| 	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3") | ||||
|  | ||||
| 	err := os.MkdirAll(dbdir, os.ModePerm) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
|  | ||||
| 	url := fmt.Sprintf("file:%s?_journal=%s&_timeout=%d&_fk=%s&_busy_timeout=%d", dbfile1, "DELETE", 1000, "true", 1000) | ||||
|  | ||||
| 	xdb, err := sqlx.Open("sqlite3", url) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
|  | ||||
| 	_, err = db.Exec(ctx, schemaStr, PP{}) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
|  | ||||
| 	return HashSqliteDatabase(ctx, db) | ||||
| } | ||||
|  | ||||
| func HashSqliteDatabase(ctx context.Context, db DB) (string, error) { | ||||
| 	ss, err := CreateSqliteDatabaseSchemaString(ctx, db) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
|  | ||||
| 	cs := sha256.Sum256([]byte(ss)) | ||||
|  | ||||
| 	return hex.EncodeToString(cs[:]), nil | ||||
| } | ||||
|  | ||||
| func CreateSqliteDatabaseSchemaString(ctx context.Context, db DB) (string, error) { | ||||
|  | ||||
| 	type colInfo struct { | ||||
| 		Name       string  `db:"name"` | ||||
| 		Type       string  `db:"type"` | ||||
| 		NotNull    string  `db:"notnull"` | ||||
| 		Default    *string `db:"dflt_value"` | ||||
| 		PrimaryKey *string `db:"pk"` | ||||
| 	} | ||||
|  | ||||
| 	type idxInfo struct { | ||||
| 		Name   string `json:"name"       db:"name"` | ||||
| 		Unique int    `json:"unique"     db:"unique"` | ||||
| 		Origin string `json:"origin"     db:"origin"` | ||||
| 		Patial int    `json:"partial"    db:"partial"` | ||||
| 	} | ||||
|  | ||||
| 	type fkyInfo struct { | ||||
| 		TableDest string `json:"table_dest"  db:"table"` | ||||
| 		From      string `json:"from"        db:"from"` | ||||
| 		To        string `json:"to"          db:"to"` | ||||
| 		OnUpdate  string `json:"on_update"   db:"on_update"` | ||||
| 		OnDelete  string `json:"on_delete"   db:"on_delete"` | ||||
| 		Match     string `json:"match"       db:"match"` | ||||
| 	} | ||||
|  | ||||
| 	type tabInfo struct { | ||||
| 		Name   string `json:"name"    db:"name"` | ||||
| 		Type   string `json:"type"    db:"type"` | ||||
| 		NumCol int    `json:"ncol"    db:"ncol"` | ||||
| 		Strict int    `json:"strict"  db:"strict"` | ||||
|  | ||||
| 		ColumnInfo []colInfo `json:"-"` | ||||
| 		IndexInfo  []idxInfo `json:"-"` | ||||
| 		FKeyInfo   []fkyInfo `json:"-"` | ||||
| 	} | ||||
|  | ||||
| 	rowsTableList, err := db.Query(ctx, "PRAGMA table_list;", PP{}) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
| 	tableList, err := ScanAll[tabInfo](rowsTableList, SModeFast, Unsafe, true) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
|  | ||||
| 	langext.SortBy(tableList, func(v tabInfo) string { return v.Name }) | ||||
|  | ||||
| 	result := make([]tabInfo, 0) | ||||
|  | ||||
| 	for i, tab := range tableList { | ||||
|  | ||||
| 		if strings.HasPrefix(tab.Name, "sqlite_") { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		{ | ||||
|  | ||||
| 			rowsColumnList, err := db.Query(ctx, fmt.Sprintf("PRAGMA table_info(\"%s\");", tab.Name), PP{}) | ||||
| 			if err != nil { | ||||
| 				return "", err | ||||
| 			} | ||||
|  | ||||
| 			columnList, err := ScanAll[colInfo](rowsColumnList, SModeFast, Unsafe, true) | ||||
| 			if err != nil { | ||||
| 				return "", err | ||||
| 			} | ||||
|  | ||||
| 			langext.SortBy(columnList, func(v colInfo) string { return v.Name }) | ||||
|  | ||||
| 			tableList[i].ColumnInfo = columnList | ||||
| 		} | ||||
|  | ||||
| 		{ | ||||
| 			rowsIdxList, err := db.Query(ctx, fmt.Sprintf("PRAGMA index_list(\"%s\");", tab.Name), PP{}) | ||||
| 			if err != nil { | ||||
| 				return "", err | ||||
| 			} | ||||
| 			idxList, err := ScanAll[idxInfo](rowsIdxList, SModeFast, Unsafe, true) | ||||
| 			if err != nil { | ||||
| 				return "", err | ||||
| 			} | ||||
|  | ||||
| 			langext.SortBy(idxList, func(v idxInfo) string { return v.Name }) | ||||
|  | ||||
| 			tableList[i].IndexInfo = idxList | ||||
| 		} | ||||
|  | ||||
| 		{ | ||||
| 			rowsIdxList, err := db.Query(ctx, fmt.Sprintf("PRAGMA foreign_key_list(\"%s\");", tab.Name), PP{}) | ||||
| 			if err != nil { | ||||
| 				return "", err | ||||
| 			} | ||||
| 			fkyList, err := ScanAll[fkyInfo](rowsIdxList, SModeFast, Unsafe, true) | ||||
| 			if err != nil { | ||||
| 				return "", err | ||||
| 			} | ||||
|  | ||||
| 			langext.SortBy(fkyList, func(v fkyInfo) string { return v.From }) | ||||
|  | ||||
| 			tableList[i].FKeyInfo = fkyList | ||||
| 		} | ||||
|  | ||||
| 		result = append(result, tableList[i]) | ||||
| 	} | ||||
|  | ||||
| 	strBuilderResult := "" | ||||
| 	for _, vTab := range result { | ||||
| 		jbinTable, err := json.Marshal(vTab) | ||||
| 		if err != nil { | ||||
| 			return "", err | ||||
| 		} | ||||
|  | ||||
| 		strBuilderResult += fmt.Sprintf("#TABLE: %s\n{\n", string(jbinTable)) | ||||
|  | ||||
| 		for _, vCol := range vTab.ColumnInfo { | ||||
| 			jbinColumn, err := json.Marshal(vCol) | ||||
| 			if err != nil { | ||||
| 				return "", err | ||||
| 			} | ||||
|  | ||||
| 			strBuilderResult += fmt.Sprintf("    COLUMN: %s\n", string(jbinColumn)) | ||||
| 		} | ||||
|  | ||||
| 		for _, vIdx := range vTab.IndexInfo { | ||||
| 			jbinIndex, err := json.Marshal(vIdx) | ||||
| 			if err != nil { | ||||
| 				return "", err | ||||
| 			} | ||||
|  | ||||
| 			strBuilderResult += fmt.Sprintf("    INDEX:  %s\n", string(jbinIndex)) | ||||
| 		} | ||||
|  | ||||
| 		for _, vFky := range vTab.FKeyInfo { | ||||
| 			jbinFKey, err := json.Marshal(vFky) | ||||
| 			if err != nil { | ||||
| 				return "", err | ||||
| 			} | ||||
|  | ||||
| 			strBuilderResult += fmt.Sprintf("    FKEY:   %s\n", string(jbinFKey)) | ||||
| 		} | ||||
|  | ||||
| 		strBuilderResult += "}\n\n" | ||||
| 	} | ||||
|  | ||||
| 	return strBuilderResult, nil | ||||
| } | ||||
| @@ -1,9 +1,13 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"database/sql" | ||||
| 	"errors" | ||||
| 	"fmt" | ||||
| 	"github.com/jmoiron/sqlx" | ||||
| 	"reflect" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| type StructScanMode string | ||||
| @@ -16,10 +20,79 @@ const ( | ||||
| type StructScanSafety string | ||||
|  | ||||
| const ( | ||||
| 	Safe   StructScanSafety = "SAFE" | ||||
| 	Unsafe StructScanSafety = "UNSAFE" | ||||
| 	Safe   StructScanSafety = "SAFE"   // return error for missing fields | ||||
| 	Unsafe StructScanSafety = "UNSAFE" // ignore missing fields | ||||
| ) | ||||
|  | ||||
| func InsertSingle[TData any](ctx context.Context, q Queryable, tableName string, v TData) (sql.Result, error) { | ||||
|  | ||||
| 	rval := reflect.ValueOf(v) | ||||
| 	rtyp := rval.Type() | ||||
|  | ||||
| 	columns := make([]string, 0) | ||||
| 	params := make([]string, 0) | ||||
| 	pp := PP{} | ||||
|  | ||||
| 	for i := 0; i < rtyp.NumField(); i++ { | ||||
|  | ||||
| 		rsfield := rtyp.Field(i) | ||||
| 		rvfield := rval.Field(i) | ||||
|  | ||||
| 		if !rsfield.IsExported() { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		columnName := rsfield.Tag.Get("db") | ||||
| 		if columnName == "" || columnName == "-" { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		paramkey := fmt.Sprintf("_%s", columnName) | ||||
|  | ||||
| 		columns = append(columns, "\""+columnName+"\"") | ||||
| 		params = append(params, ":"+paramkey) | ||||
| 		pp[paramkey] = rvfield.Interface() | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	sqlstr := fmt.Sprintf("INSERT"+" INTO \"%s\" (%s) VALUES (%s)", tableName, strings.Join(columns, ", "), strings.Join(params, ", ")) | ||||
|  | ||||
| 	sqlr, err := q.Exec(ctx, sqlstr, pp) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	return sqlr, nil | ||||
| } | ||||
|  | ||||
| func QuerySingle[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) (TData, error) { | ||||
| 	rows, err := q.Query(ctx, sql, pp) | ||||
| 	if err != nil { | ||||
| 		return *new(TData), err | ||||
| 	} | ||||
|  | ||||
| 	data, err := ScanSingle[TData](rows, mode, sec, true) | ||||
| 	if err != nil { | ||||
| 		return *new(TData), err | ||||
| 	} | ||||
|  | ||||
| 	return data, nil | ||||
| } | ||||
|  | ||||
| func QueryAll[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) ([]TData, error) { | ||||
| 	rows, err := q.Query(ctx, sql, pp) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	data, err := ScanAll[TData](rows, mode, sec, true) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	return data, nil | ||||
| } | ||||
|  | ||||
| func ScanSingle[TData any](rows *sqlx.Rows, mode StructScanMode, sec StructScanSafety, close bool) (TData, error) { | ||||
| 	if rows.Next() { | ||||
| 		var strscan *StructScanner | ||||
|   | ||||
							
								
								
									
										11
									
								
								wmo/mongo.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										11
									
								
								wmo/mongo.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,11 @@ | ||||
| package wmo | ||||
|  | ||||
| import "go.mongodb.org/mongo-driver/mongo" | ||||
|  | ||||
| func W[TData any](collection *mongo.Collection) *Coll[TData] { | ||||
| 	c := Coll[TData]{coll: collection} | ||||
|  | ||||
| 	c.init() | ||||
|  | ||||
| 	return &c | ||||
| } | ||||
							
								
								
									
										69
									
								
								wmo/pagination.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										69
									
								
								wmo/pagination.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,69 @@ | ||||
| package wmo | ||||
|  | ||||
| import ( | ||||
| 	"go.mongodb.org/mongo-driver/bson" | ||||
| 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | ||||
| ) | ||||
|  | ||||
| func CreatePagination[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) { | ||||
|  | ||||
| 	cond := bson.A{} | ||||
| 	sort := bson.D{} | ||||
|  | ||||
| 	valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	if sortPrimary == ct.SortASC { | ||||
| 		// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary | ||||
| 		cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}}) | ||||
| 		sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) | ||||
| 	} else if sortPrimary == ct.SortDESC { | ||||
| 		// We sort DESC on <field> - so we want all entries older ($lt) than the $primary | ||||
| 		cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}}) | ||||
| 		sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) | ||||
| 	} | ||||
|  | ||||
| 	if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary { | ||||
|  | ||||
| 		valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary) | ||||
| 		if err != nil { | ||||
| 			return nil, err | ||||
| 		} | ||||
|  | ||||
| 		if *sortSecondary == ct.SortASC { | ||||
|  | ||||
| 			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer) | ||||
| 			cond = append(cond, bson.M{"$and": bson.A{ | ||||
| 				bson.M{fieldPrimary: valuePrimary}, | ||||
| 				bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}}, | ||||
| 			}}) | ||||
|  | ||||
| 			sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) | ||||
|  | ||||
| 		} else if *sortSecondary == ct.SortDESC { | ||||
|  | ||||
| 			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older) | ||||
| 			cond = append(cond, bson.M{"$and": bson.A{ | ||||
| 				bson.M{fieldPrimary: valuePrimary}, | ||||
| 				bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}}, | ||||
| 			}}) | ||||
|  | ||||
| 			sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) | ||||
|  | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	pipeline := make([]bson.D, 0, 3) | ||||
|  | ||||
| 	pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}}) | ||||
|  | ||||
| 	pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}}) | ||||
|  | ||||
| 	if pageSize != nil { | ||||
| 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}}) | ||||
| 	} | ||||
|  | ||||
| 	return pipeline, nil | ||||
| } | ||||
							
								
								
									
										294
									
								
								wmo/reflection.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										294
									
								
								wmo/reflection.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,294 @@ | ||||
| package wmo | ||||
|  | ||||
| import ( | ||||
| 	"errors" | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"reflect" | ||||
| 	"strconv" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| func (c *Coll[TData]) init() { | ||||
|  | ||||
| 	c.dataTypeMap = make(map[string]fullTypeRef[TData]) | ||||
|  | ||||
| 	example := *new(TData) | ||||
|  | ||||
| 	v := reflect.ValueOf(example) | ||||
|  | ||||
| 	c.initFields("", v, make([]int, 0)) | ||||
|  | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) initFields(prefix string, rval reflect.Value, idxarr []int) { | ||||
|  | ||||
| 	rtyp := rval.Type() | ||||
|  | ||||
| 	for i := 0; i < rtyp.NumField(); i++ { | ||||
|  | ||||
| 		rsfield := rtyp.Field(i) | ||||
| 		rvfield := rval.Field(i) | ||||
|  | ||||
| 		if !rsfield.IsExported() { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		bsonkey, found := rsfield.Tag.Lookup("bson") | ||||
| 		if !found || bsonkey == "-" { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		fullKey := prefix + bsonkey | ||||
|  | ||||
| 		newIdxArr := langext.ArrCopy(idxarr) | ||||
| 		newIdxArr = append(newIdxArr, i) | ||||
|  | ||||
| 		if rvfield.Type().Kind() == reflect.Pointer { | ||||
|  | ||||
| 			c.dataTypeMap[fullKey] = fullTypeRef[TData]{ | ||||
| 				IsPointer:      true, | ||||
| 				Kind:           rvfield.Type().Elem().Kind(), | ||||
| 				Type:           rvfield.Type().Elem(), | ||||
| 				UnderlyingType: langext.Underlying(rvfield.Type().Elem()), | ||||
| 				Name:           rsfield.Name, | ||||
| 				Index:          newIdxArr, | ||||
| 			} | ||||
|  | ||||
| 		} else { | ||||
|  | ||||
| 			c.dataTypeMap[fullKey] = fullTypeRef[TData]{ | ||||
| 				IsPointer:      false, | ||||
| 				Kind:           rvfield.Type().Kind(), | ||||
| 				Type:           rvfield.Type(), | ||||
| 				UnderlyingType: langext.Underlying(rvfield.Type()), | ||||
| 				Name:           rsfield.Name, | ||||
| 				Index:          newIdxArr, | ||||
| 			} | ||||
|  | ||||
| 		} | ||||
|  | ||||
| 		if rvfield.Kind() == reflect.Struct { | ||||
| 			c.initFields(fullKey+".", rvfield, newIdxArr) | ||||
| 		} | ||||
|  | ||||
| 	} | ||||
|  | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) getTokenValueAsMongoType(value string, fieldName string) (any, error) { | ||||
| 	fref := c.dataTypeMap[fieldName] | ||||
|  | ||||
| 	if fref.IsPointer && value == "" { | ||||
| 		pointertype := reflect.New(fref.Type).Type() | ||||
| 		nilvalue := reflect.Zero(pointertype) | ||||
| 		outvalue := nilvalue.Interface() | ||||
| 		return outvalue, nil | ||||
| 	} | ||||
|  | ||||
| 	pointerize := func(v any) any { | ||||
| 		if !fref.IsPointer { | ||||
| 			return v | ||||
| 		} | ||||
|  | ||||
| 		rval1 := reflect.ValueOf(v) | ||||
| 		rval2 := rval1.Convert(fref.Type) | ||||
| 		rval3 := reflect.New(fref.Type) | ||||
| 		rval3.Elem().Set(rval2) | ||||
| 		return rval3.Interface() | ||||
| 	} | ||||
|  | ||||
| 	if fref.UnderlyingType == reflect.TypeOf("") { | ||||
|  | ||||
| 		rt, ok := langext.TryCastType(value, fref.Type) | ||||
| 		if !ok { | ||||
| 			return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from string to %s", fieldName, fref.Type.String())) | ||||
| 		} | ||||
|  | ||||
| 		return pointerize(rt), nil | ||||
| 	} | ||||
|  | ||||
| 	if fref.UnderlyingType == reflect.TypeOf(time.Time{}) { | ||||
|  | ||||
| 		t, err := time.Parse(time.RFC3339Nano, value) | ||||
| 		if err != nil { | ||||
| 			return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as time.Time ('%s')", fieldName, value)) | ||||
| 		} | ||||
|  | ||||
| 		rt, ok := langext.TryCastType(t, fref.Type) | ||||
| 		if !ok { | ||||
| 			return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from time.Time to %s", fieldName, fref.Type.String())) | ||||
| 		} | ||||
|  | ||||
| 		return pointerize(rt), nil | ||||
| 	} | ||||
|  | ||||
| 	if fref.UnderlyingType == reflect.TypeOf(int(0)) { | ||||
|  | ||||
| 		t, err := strconv.ParseInt(value, 10, 64) | ||||
| 		if err != nil { | ||||
| 			return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as int64 ('%s')", fieldName, value)) | ||||
| 		} | ||||
|  | ||||
| 		rt, ok := langext.TryCastType(int(t), fref.Type) | ||||
| 		if !ok { | ||||
| 			return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from int to %s", fieldName, fref.Type.String())) | ||||
| 		} | ||||
|  | ||||
| 		return pointerize(rt), nil | ||||
| 	} | ||||
|  | ||||
| 	if fref.UnderlyingType == reflect.TypeOf(int32(0)) { | ||||
|  | ||||
| 		t, err := strconv.ParseInt(value, 10, 64) | ||||
| 		if err != nil { | ||||
| 			return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as int32 ('%s')", fieldName, value)) | ||||
| 		} | ||||
|  | ||||
| 		rt, ok := langext.TryCastType(int32(t), fref.Type) | ||||
| 		if !ok { | ||||
| 			return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from int32 to %s", fieldName, fref.Type.String())) | ||||
| 		} | ||||
|  | ||||
| 		return pointerize(rt), nil | ||||
| 	} | ||||
|  | ||||
| 	if fref.UnderlyingType == reflect.TypeOf(int64(0)) { | ||||
|  | ||||
| 		t, err := strconv.ParseInt(value, 10, 64) | ||||
| 		if err != nil { | ||||
| 			return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as int64 ('%s')", fieldName, value)) | ||||
| 		} | ||||
|  | ||||
| 		rt, ok := langext.TryCastType(int64(t), fref.Type) | ||||
| 		if !ok { | ||||
| 			return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from int64 to %s", fieldName, fref.Type.String())) | ||||
| 		} | ||||
|  | ||||
| 		return pointerize(rt), nil | ||||
| 	} | ||||
|  | ||||
| 	if fref.UnderlyingType == reflect.TypeOf(float32(0)) { | ||||
|  | ||||
| 		t, err := strconv.ParseFloat(value, 64) | ||||
| 		if err != nil { | ||||
| 			return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as float32 ('%s')", fieldName, value)) | ||||
| 		} | ||||
|  | ||||
| 		rt, ok := langext.TryCastType(float32(t), fref.Type) | ||||
| 		if !ok { | ||||
| 			return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from float32 to %s", fieldName, fref.Type.String())) | ||||
| 		} | ||||
|  | ||||
| 		return pointerize(rt), nil | ||||
| 	} | ||||
|  | ||||
| 	if fref.UnderlyingType == reflect.TypeOf(float64(0)) { | ||||
|  | ||||
| 		t, err := strconv.ParseFloat(value, 64) | ||||
| 		if err != nil { | ||||
| 			return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as float64 ('%s')", fieldName, value)) | ||||
| 		} | ||||
|  | ||||
| 		rt, ok := langext.TryCastType(float64(t), fref.Type) | ||||
| 		if !ok { | ||||
| 			return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from float64 to %s", fieldName, fref.Type.String())) | ||||
| 		} | ||||
|  | ||||
| 		return pointerize(rt), nil | ||||
| 	} | ||||
|  | ||||
| 	return nil, errors.New(fmt.Sprintf("failed to parse field '%s' of type %s (%s)", fieldName, fref.Type.String(), fref.UnderlyingType.String())) | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) getFieldValueAsTokenString(entity TData, fieldName string) (string, error) { | ||||
| 	fref := c.dataTypeMap[fieldName] | ||||
|  | ||||
| 	realValue := c.getFieldValue(entity, fieldName) | ||||
|  | ||||
| 	if langext.IsNil(realValue) { | ||||
| 		return "", nil | ||||
| 	} | ||||
|  | ||||
| 	reflValue := reflect.ValueOf(realValue) | ||||
| 	if reflValue.Kind() == reflect.Pointer { | ||||
| 		reflValue = reflValue.Elem() | ||||
| 		realValue = reflValue.Interface() | ||||
| 	} | ||||
|  | ||||
| 	if fref.UnderlyingType == reflect.TypeOf("") { | ||||
|  | ||||
| 		rt, ok := langext.TryCastType(realValue, reflect.TypeOf("")) | ||||
| 		if !ok { | ||||
| 			return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to string", fieldName, fref.Type.String())) | ||||
| 		} | ||||
|  | ||||
| 		return rt.(string), nil | ||||
| 	} | ||||
|  | ||||
| 	if fref.UnderlyingType == reflect.TypeOf(time.Time{}) { | ||||
| 		rt, ok := langext.TryCastType(realValue, reflect.TypeOf(time.Time{})) | ||||
| 		if !ok { | ||||
| 			return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to time.Time", fieldName, fref.Type.String())) | ||||
| 		} | ||||
|  | ||||
| 		return rt.(time.Time).Format(time.RFC3339Nano), nil | ||||
| 	} | ||||
|  | ||||
| 	if fref.UnderlyingType == reflect.TypeOf(int(0)) { | ||||
| 		rt, ok := langext.TryCastType(realValue, reflect.TypeOf(int(0))) | ||||
| 		if !ok { | ||||
| 			return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to int", fieldName, fref.Type.String())) | ||||
| 		} | ||||
|  | ||||
| 		return strconv.Itoa(rt.(int)), nil | ||||
| 	} | ||||
|  | ||||
| 	if fref.UnderlyingType == reflect.TypeOf(int32(0)) { | ||||
| 		rt, ok := langext.TryCastType(realValue, reflect.TypeOf(int32(0))) | ||||
| 		if !ok { | ||||
| 			return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to int32", fieldName, fref.Type.String())) | ||||
| 		} | ||||
|  | ||||
| 		return strconv.FormatInt(int64(rt.(int32)), 10), nil | ||||
| 	} | ||||
|  | ||||
| 	if fref.UnderlyingType == reflect.TypeOf(int64(0)) { | ||||
| 		rt, ok := langext.TryCastType(realValue, reflect.TypeOf(int64(0))) | ||||
| 		if !ok { | ||||
| 			return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to int64", fieldName, fref.Type.String())) | ||||
| 		} | ||||
|  | ||||
| 		return strconv.FormatInt(rt.(int64), 10), nil | ||||
| 	} | ||||
|  | ||||
| 	if fref.UnderlyingType == reflect.TypeOf(float32(0)) { | ||||
| 		rt, ok := langext.TryCastType(realValue, reflect.TypeOf(float32(0))) | ||||
| 		if !ok { | ||||
| 			return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to float32", fieldName, fref.Type.String())) | ||||
| 		} | ||||
|  | ||||
| 		return strconv.FormatFloat(float64(rt.(float32)), 'f', -1, 32), nil | ||||
| 	} | ||||
|  | ||||
| 	if fref.UnderlyingType == reflect.TypeOf(float64(0)) { | ||||
| 		rt, ok := langext.TryCastType(realValue, reflect.TypeOf(float64(0))) | ||||
| 		if !ok { | ||||
| 			return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to float64", fieldName, fref.Type.String())) | ||||
| 		} | ||||
|  | ||||
| 		return strconv.FormatFloat(rt.(float64), 'f', -1, 64), nil | ||||
| 	} | ||||
|  | ||||
| 	return "", errors.New(fmt.Sprintf("failed to parse field '%s' of type %s (%s)", fieldName, fref.Type.String(), fref.UnderlyingType.String())) | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) getFieldType(fieldName string) fullTypeRef[TData] { | ||||
| 	return c.dataTypeMap[fieldName] | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) getFieldValue(data TData, fieldName string) any { | ||||
| 	fref := c.dataTypeMap[fieldName] | ||||
| 	rval := reflect.ValueOf(data) | ||||
| 	return rval.FieldByIndex(fref.Index).Interface() | ||||
| } | ||||
							
								
								
									
										160
									
								
								wmo/reflection_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										160
									
								
								wmo/reflection_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,160 @@ | ||||
| package wmo | ||||
|  | ||||
| import ( | ||||
| 	"go.mongodb.org/mongo-driver/mongo" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||
| 	"testing" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| func TestReflectionGetFieldType(t *testing.T) { | ||||
|  | ||||
| 	type IDType string | ||||
|  | ||||
| 	type TestData struct { | ||||
| 		ID    IDType    `bson:"_id"` | ||||
| 		CDate time.Time `bson:"cdate"` | ||||
| 		Sub   struct { | ||||
| 			A string `bson:"a"` | ||||
| 		} `bson:"sub"` | ||||
| 		Str string `bson:"str"` | ||||
| 		Ptr *int   `bson:"ptr"` | ||||
| 	} | ||||
|  | ||||
| 	coll := W[TestData](&mongo.Collection{}) | ||||
|  | ||||
| 	coll.init() | ||||
|  | ||||
| 	t0 := time.Now() | ||||
|  | ||||
| 	d := TestData{ | ||||
| 		ID:    "1", | ||||
| 		CDate: t0, | ||||
| 		Sub: struct { | ||||
| 			A string `bson:"a"` | ||||
| 		}{ | ||||
| 			A: "2", | ||||
| 		}, | ||||
| 		Str: "3", | ||||
| 		Ptr: langext.Ptr(4), | ||||
| 	} | ||||
|  | ||||
| 	tst.AssertEqual(t, coll.getFieldType("_id").Kind.String(), "string") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("_id").Type.String(), "wmo.IDType") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("_id").Name, "ID") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("_id").IsPointer, false) | ||||
| 	tst.AssertEqual(t, coll.getFieldValue(d, "_id").(IDType), "1") | ||||
|  | ||||
| 	tst.AssertEqual(t, coll.getFieldType("cdate").Kind.String(), "struct") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("cdate").Type.String(), "time.Time") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("cdate").Name, "CDate") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("cdate").IsPointer, false) | ||||
| 	tst.AssertEqual(t, coll.getFieldValue(d, "cdate").(time.Time), t0) | ||||
|  | ||||
| 	tst.AssertEqual(t, coll.getFieldType("sub.a").Kind.String(), "string") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("sub.a").Type.String(), "string") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("sub.a").Name, "A") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("sub.a").IsPointer, false) | ||||
| 	tst.AssertEqual(t, coll.getFieldValue(d, "sub.a").(string), "2") | ||||
|  | ||||
| 	tst.AssertEqual(t, coll.getFieldType("str").Kind.String(), "string") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("str").Type.String(), "string") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("str").Name, "Str") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("str").IsPointer, false) | ||||
| 	tst.AssertEqual(t, coll.getFieldValue(d, "str").(string), "3") | ||||
|  | ||||
| 	tst.AssertEqual(t, coll.getFieldType("ptr").Kind.String(), "int") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("ptr").Type.String(), "int") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("ptr").Name, "Ptr") | ||||
| 	tst.AssertEqual(t, coll.getFieldType("ptr").IsPointer, true) | ||||
| 	tst.AssertEqual(t, *coll.getFieldValue(d, "ptr").(*int), 4) | ||||
| } | ||||
|  | ||||
| func TestReflectionGetTokenValueAsMongoType(t *testing.T) { | ||||
|  | ||||
| 	type IDType string | ||||
|  | ||||
| 	type TestData struct { | ||||
| 		ID    IDType    `bson:"_id"` | ||||
| 		CDate time.Time `bson:"cdate"` | ||||
| 		Sub   struct { | ||||
| 			A string `bson:"a"` | ||||
| 		} `bson:"sub"` | ||||
| 		Str string `bson:"str"` | ||||
| 		Ptr *int   `bson:"ptr"` | ||||
| 		Num int    `bson:"num"` | ||||
| 	} | ||||
|  | ||||
| 	coll := W[TestData](&mongo.Collection{}) | ||||
|  | ||||
| 	coll.init() | ||||
|  | ||||
| 	gtvasmt := func(value string, fieldName string) any { | ||||
| 		v, err := coll.getTokenValueAsMongoType(value, fieldName) | ||||
| 		if err != nil { | ||||
| 			t.Errorf("%s", "failed to getTokenValueAsMongoType") | ||||
| 		} | ||||
| 		return v | ||||
| 	} | ||||
|  | ||||
| 	tst.AssertEqual(t, gtvasmt("hello", "str").(string), "hello") | ||||
| 	tst.AssertEqual(t, gtvasmt("4", "num").(int), 4) | ||||
| 	tst.AssertEqual(t, gtvasmt("asdf", "_id").(IDType), "asdf") | ||||
| 	tst.AssertEqual(t, gtvasmt("", "ptr").(*int), nil) | ||||
| 	tst.AssertEqual(t, *(gtvasmt("123", "ptr").(*int)), 123) | ||||
| } | ||||
|  | ||||
| func TestReflectionGetFieldValueAsTokenString(t *testing.T) { | ||||
|  | ||||
| 	type IDType string | ||||
|  | ||||
| 	type TestData struct { | ||||
| 		ID    IDType    `bson:"_id"` | ||||
| 		CDate time.Time `bson:"cdate"` | ||||
| 		Sub   struct { | ||||
| 			A string `bson:"a"` | ||||
| 		} `bson:"sub"` | ||||
| 		Str  string  `bson:"str"` | ||||
| 		Ptr  *int    `bson:"ptr"` | ||||
| 		Num  int     `bson:"num"` | ||||
| 		Ptr2 *int    `bson:"ptr2"` | ||||
| 		FFF  float64 `bson:"fff"` | ||||
| 	} | ||||
|  | ||||
| 	coll := W[TestData](&mongo.Collection{}) | ||||
|  | ||||
| 	coll.init() | ||||
|  | ||||
| 	t0 := time.Now() | ||||
|  | ||||
| 	d := TestData{ | ||||
| 		ID:    "1", | ||||
| 		CDate: t0, | ||||
| 		Sub: struct { | ||||
| 			A string `bson:"a"` | ||||
| 		}{ | ||||
| 			A: "2", | ||||
| 		}, | ||||
| 		Str:  "3", | ||||
| 		Ptr:  langext.Ptr(4), | ||||
| 		Num:  22, | ||||
| 		FFF:  22.5, | ||||
| 		Ptr2: nil, | ||||
| 	} | ||||
|  | ||||
| 	gfvats := func(value TestData, fieldName string) string { | ||||
| 		v, err := coll.getFieldValueAsTokenString(value, fieldName) | ||||
| 		if err != nil { | ||||
| 			t.Errorf("%s", "failed to getTokenValueAsMongoType") | ||||
| 		} | ||||
| 		return v | ||||
| 	} | ||||
|  | ||||
| 	tst.AssertEqual(t, gfvats(d, "str"), "3") | ||||
| 	tst.AssertEqual(t, gfvats(d, "num"), "22") | ||||
| 	tst.AssertEqual(t, gfvats(d, "_id"), "1") | ||||
| 	tst.AssertEqual(t, gfvats(d, "ptr"), "4") | ||||
| 	tst.AssertEqual(t, gfvats(d, "ptr2"), "") | ||||
| 	tst.AssertEqual(t, gfvats(d, "fff"), "22.5") | ||||
| } | ||||
							
								
								
									
										231
									
								
								wmo/wrapper.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										231
									
								
								wmo/wrapper.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,231 @@ | ||||
| package wmo | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"go.mongodb.org/mongo-driver/bson" | ||||
| 	"go.mongodb.org/mongo-driver/mongo" | ||||
| 	"go.mongodb.org/mongo-driver/mongo/options" | ||||
| 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"reflect" | ||||
| ) | ||||
|  | ||||
// fullTypeRef describes one (possibly nested) field of TData; it is keyed in
// Coll.dataTypeMap by the field's full bson path (e.g. "sub.a").
type fullTypeRef[TData any] struct {
	IsPointer      bool         // true if the struct field is a pointer; Kind/Type then describe the pointee
	Kind           reflect.Kind // reflect kind of the (dereferenced) field
	Type           reflect.Type // concrete (possibly named) type of the (dereferenced) field
	UnderlyingType reflect.Type // Type with named types resolved (via langext.Underlying)
	Name           string       // Go struct-field name
	Index          []int        // field-index path for reflect.Value.FieldByIndex from the root struct
}
|  | ||||
// Coll wraps a mongo.Collection with generic, type-safe helper methods for
// documents of type TData.
type Coll[TData any] struct {
	coll        *mongo.Collection
	dataTypeMap map[string]fullTypeRef[TData] // bson path -> field metadata, filled by init()
}
|  | ||||
// Collection returns the underlying mongo.Collection.
func (c *Coll[TData]) Collection() *mongo.Collection {
	return c.coll
}
|  | ||||
// Name returns the name of the underlying collection.
func (c *Coll[TData]) Name() string {
	return c.coll.Name()
}
|  | ||||
// Indexes returns the IndexView of the underlying collection.
func (c *Coll[TData]) Indexes() mongo.IndexView {
	return c.coll.Indexes()
}
|  | ||||
// Drop deletes the underlying collection from the database.
func (c *Coll[TData]) Drop(ctx context.Context) error {
	return c.coll.Drop(ctx)
}
|  | ||||
| func (c *Coll[TData]) FindOne(ctx context.Context, filter any) (TData, error) { | ||||
| 	var res TData | ||||
|  | ||||
| 	err := c.coll.FindOne(ctx, filter).Decode(&res) | ||||
| 	if err != nil { | ||||
| 		return *new(TData), err | ||||
| 	} | ||||
|  | ||||
| 	return res, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter any) (*TData, error) { | ||||
| 	var res TData | ||||
|  | ||||
| 	err := c.coll.FindOne(ctx, filter).Decode(&res) | ||||
| 	if err == mongo.ErrNoDocuments { | ||||
| 		return nil, nil | ||||
| 	} | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	return &res, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) FindOneByID(ctx context.Context, id any) (TData, error) { | ||||
| 	var res TData | ||||
|  | ||||
| 	err := c.coll.FindOne(ctx, bson.M{"_id": id}).Decode(&res) | ||||
| 	if err != nil { | ||||
| 		return *new(TData), err | ||||
| 	} | ||||
|  | ||||
| 	return res, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id any) (*TData, error) { | ||||
| 	var res TData | ||||
|  | ||||
| 	err := c.coll.FindOne(ctx, bson.M{"_id": id}).Decode(&res) | ||||
| 	if err == mongo.ErrNoDocuments { | ||||
| 		return nil, nil | ||||
| 	} | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	return &res, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) Find(ctx context.Context, filter any, opts ...*options.FindOptions) ([]TData, error) { | ||||
| 	cursor, err := c.coll.Find(ctx, filter, opts...) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	res := make([]TData, 0, cursor.RemainingBatchLength()) | ||||
| 	err = cursor.All(ctx, &res) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	return res, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) { | ||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	res := make([]TData, 0, cursor.RemainingBatchLength()) | ||||
| 	err = cursor.All(ctx, &res) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	return res, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) ReplaceOne(ctx context.Context, id any, value TData) error { | ||||
| 	_, err := c.coll.UpdateOne(ctx, bson.M{"_id": id}, value) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) UpdateOne(ctx context.Context, id any, updateQuery any) error { | ||||
| 	_, err := c.coll.UpdateOne(ctx, bson.M{"_id": id}, updateQuery) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) UpdateOneAndReturn(ctx context.Context, id any, updateQuery any) (TData, error) { | ||||
| 	_, err := c.coll.UpdateOne(ctx, bson.M{"_id": id}, updateQuery) | ||||
| 	if err != nil { | ||||
| 		return *new(TData), err | ||||
| 	} | ||||
|  | ||||
| 	var res TData | ||||
|  | ||||
| 	err = c.coll.FindOne(ctx, bson.M{"_id": id}).Decode(&res) | ||||
| 	if err != nil { | ||||
| 		return *new(TData), err | ||||
| 	} | ||||
|  | ||||
| 	return res, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) DeleteOne(ctx context.Context, id any) error { | ||||
| 	_, err := c.coll.DeleteOne(ctx, bson.M{"_id": id}) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) { | ||||
| 	if inTok.Mode == ct.CTMEnd { | ||||
| 		return make([]TData, 0), ct.End(), nil | ||||
| 	} | ||||
|  | ||||
| 	pipeline := filter.FilterQuery() | ||||
|  | ||||
| 	sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary := filter.Pagination() | ||||
|  | ||||
| 	paginationPipeline, err := CreatePagination(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | ||||
| 	if err != nil { | ||||
| 		return nil, ct.CursorToken{}, err | ||||
| 	} | ||||
|  | ||||
| 	pipeline = append(pipeline, paginationPipeline...) | ||||
|  | ||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||
| 	if err != nil { | ||||
| 		return nil, ct.CursorToken{}, err | ||||
| 	} | ||||
|  | ||||
| 	entities := make([]TData, 0, cursor.RemainingBatchLength()+1) | ||||
| 	for (pageSize == nil || len(entities) != *pageSize) && cursor.Next(ctx) { | ||||
| 		var entry TData | ||||
| 		err = cursor.Decode(&entry) | ||||
| 		if err != nil { | ||||
| 			return nil, ct.CursorToken{}, err | ||||
| 		} | ||||
| 		entities = append(entities, entry) | ||||
| 	} | ||||
|  | ||||
| 	if pageSize == nil || len(entities) <= *pageSize || !cursor.TryNext(ctx) { | ||||
| 		return entities, ct.End(), nil | ||||
| 	} | ||||
|  | ||||
| 	last := entities[len(entities)-1] | ||||
|  | ||||
| 	nextToken, _ := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize) | ||||
|  | ||||
| 	return entities, nextToken, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) { | ||||
|  | ||||
| 	valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary) | ||||
| 	if err != nil { | ||||
| 		return ct.CursorToken{}, err | ||||
| 	} | ||||
|  | ||||
| 	valueSeconary := "" | ||||
| 	if fieldSecondary != nil && dirSecondary != nil { | ||||
| 		valueSeconary, err = c.getFieldValueAsTokenString(lastEntity, *fieldSecondary) | ||||
| 		if err != nil { | ||||
| 			return ct.CursorToken{}, err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return ct.CursorToken{ | ||||
| 		Mode:           ct.CTMNormal, | ||||
| 		ValuePrimary:   valuePrimary, | ||||
| 		ValueSecondary: valueSeconary, | ||||
| 		Direction:      dirPrimary, | ||||
| 		PageSize:       langext.Coalesce(pageSize, 0), | ||||
| 		Extra:          ct.Extra{}, | ||||
| 	}, nil | ||||
| } | ||||
		Reference in New Issue
	
	Block a user