Compare commits

21 Commits:

f7dce4a102
45d4fd7101
c7df9d2264
d0954bf133
8affa81bb9
fe9ebf0bab
a4b5f33d15
e89e2c18f2
b16d5152c7
5fb2f8a312
2ad820be8d
555096102a
d76d7b5cb9
6622c9003d
b02e1d2e85
c338d23070
1fbae343a4
31418bf0e6
6d45f6f667
f610a2202c
2807299d46
.idea/goext.iml (generated, 6 changed lines)
@@ -1,6 +1,10 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <module type="WEB_MODULE" version="4">
-  <component name="Go" enabled="true" />
+  <component name="Go" enabled="true">
+    <buildTags>
+      <option name="goVersion" value="1.19" />
+    </buildTags>
+  </component>
   <component name="NewModuleRootManager">
     <content url="file://$MODULE_DIR$" />
     <orderEntry type="inheritedJdk" />
Makefile (4 changed lines)
@@ -3,7 +3,9 @@ run:
 	echo "This is a library - can't be run" && false

 test:
-	go test ./...
+	# go test ./...
+	which gotestsum || go install gotest.tools/gotestsum@latest
+	gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./test"

 version:
 	_data/version.sh
README.md (35 changed lines)
@@ -5,4 +5,37 @@ A collection of general & useful library methods

 This should not have any heavy dependencies (gin, mongo, etc) and add missing basic language features...

 Potentially needs `export GOPRIVATE="gogs.mikescher.com"`
+
+
+### Packages:
+
+| Name        | Maintainer | Description                                                                                                    |
+|-------------|------------|----------------------------------------------------------------------------------------------------------------|
+| langext     | Mike       | General utility/helper functions (everything that's missing from the go standard library)                       |
+| mathext     | Mike       | Utility/Helper functions for math                                                                               |
+| cryptext    | Mike       | Utility/Helper functions for encryption                                                                         |
+| syncext     | Mike       | Utility/Helper functions for multi-threading / mutex / channels                                                 |
+| dataext     | Mike       | Various useful data structures                                                                                  |
+| zipext      | Mike       | Utility for zip/gzip/tar etc                                                                                    |
+| reflectext  | Mike       | Utility for golang reflection                                                                                   |
+|             |            |                                                                                                                  |
+| mongoext    | Mike       | Utility/Helper functions for mongodb                                                                            |
+| cursortoken | Mike       | MongoDB cursortoken implementation                                                                              |
+|             |            |                                                                                                                  |
+| totpext     | Mike       | Implementation of TOTP (2-Factor-Auth)                                                                          |
+| termext     | Mike       | Utilities for terminals (mostly color output)                                                                   |
+| confext     | Mike       | Parses environment configuration into structs                                                                   |
+| cmdext      | Mike       | Runner for external commands/processes                                                                          |
+|             |            |                                                                                                                  |
+| sq          | Mike       | Utility functions for sql based databases                                                                       |
+| tst         | Mike       | Utility functions for unit tests                                                                                |
+|             |            |                                                                                                                  |
+| rfctime     | Mike       | Classes for time serialization, with different marshalling methods for mongo and json                           |
+| gojson      | Mike       | Same interface for marshalling/unmarshalling as go/json, except with proper serialization of null arrays/maps   |
+|             |            |                                                                                                                  |
+| bfcodegen   | Mike       | Various codegen tools (run via go generate)                                                                     |
+|             |            |                                                                                                                  |
+| rext        | Mike       | Regex wrapper, wraps regexp with a better interface                                                             |
+| wmo         | Mike       | Mongo wrapper, wraps mongodb with a better interface                                                            |
+|             |            |                                                                                                                  |
TODO.md (new file, 13 lines)
@@ -0,0 +1,13 @@
+
+
+- cronext
+
+- cursortoken
+
+- typed/generic mongo wrapper
+
+- error package
+
+- rfctime.DateOnly
+- rfctime.HMSTimeOnly
+- rfctime.NanoTimeOnly
_data/version.sh
@@ -23,6 +23,8 @@ fi

 git pull --ff
+
+go get -u ./...

 curr_vers=$(git describe --tags --abbrev=0 | sed 's/v//g')

 next_ver=$(echo "$curr_vers" | awk -F. -v OFS=. 'NF==1{print ++$NF}; NF>1{if(length($NF+1)>length($NF))$(NF-1)++; $NF=sprintf("%0*d", length($NF), ($NF+1)%(10^length($NF))); print}')
@@ -32,6 +34,8 @@ echo "> Current Version: ${curr_vers}"
 echo "> Next Version:    ${next_ver}"
 echo ""
+
+printf "package goext\n\nconst GoextVersion = \"%s\"\n\nconst GoextVersionTimestamp = \"%s\"\n" "${next_ver}" "$( date +"%Y-%m-%dT%H:%M:%S%z" )" > "goextVersion.go"

 git add --verbose .

 msg="v${next_ver}"
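The awk one-liner above increments the last component of the most recent git tag while preserving its zero-padding (carrying into the previous component on overflow), so for example 0.0.147 becomes 0.0.148; the added printf line then writes that version into goextVersion.go, which is why the generated constant further down reads "0.0.148".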
bfcodegen/enum-generate.go (new file, 355 lines)
@@ -0,0 +1,355 @@

package bfcodegen

import (
    "errors"
    "fmt"
    "gogs.mikescher.com/BlackForestBytes/goext"
    "gogs.mikescher.com/BlackForestBytes/goext/cmdext"
    "gogs.mikescher.com/BlackForestBytes/goext/cryptext"
    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "gogs.mikescher.com/BlackForestBytes/goext/rext"
    "io"
    "os"
    "path"
    "regexp"
    "strings"
    "time"
)

type EnumDefVal struct {
    VarName     string
    Value       string
    Description *string
}

type EnumDef struct {
    File         string
    EnumTypeName string
    Type         string
    Values       []EnumDefVal
}

var rexPackage = rext.W(regexp.MustCompile("^package\\s+(?P<name>[A-Za-z0-9_]+)\\s*$"))

var rexEnumDef = rext.W(regexp.MustCompile("^\\s*type\\s+(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*//\\s*(@enum:type).*$"))

var rexValueDef = rext.W(regexp.MustCompile("^\\s*(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*=\\s*(?P<value>(\"[A-Za-z0-9_:]+\"|[0-9]+))\\s*(//(?P<descr>.*))?.*$"))

var rexChecksumConst = rext.W(regexp.MustCompile("const ChecksumGenerator = \"(?P<cs>[A-Za-z0-9_]*)\""))

func GenerateEnumSpecs(sourceDir string, destFile string) error {

    files, err := os.ReadDir(sourceDir)
    if err != nil {
        return err
    }

    oldChecksum := "N/A"
    if _, err := os.Stat(destFile); !os.IsNotExist(err) {
        content, err := os.ReadFile(destFile)
        if err != nil {
            return err
        }
        if m, ok := rexChecksumConst.MatchFirst(string(content)); ok {
            oldChecksum = m.GroupByName("cs").Value()
        }
    }

    files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
    files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
    langext.SortBy(files, func(v os.DirEntry) string { return v.Name() })

    newChecksumStr := goext.GoextVersion
    for _, f := range files {
        content, err := os.ReadFile(path.Join(sourceDir, f.Name()))
        if err != nil {
            return err
        }
        newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content)
    }

    newChecksum := cryptext.BytesSha256([]byte(newChecksumStr))

    if newChecksum != oldChecksum {
        fmt.Printf("[EnumGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum)
    } else {
        fmt.Printf("[EnumGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum)
        return nil
    }

    allEnums := make([]EnumDef, 0)

    pkgname := ""

    for _, f := range files {
        fmt.Printf("========= %s =========\n\n", f.Name())
        fileEnums, pn, err := processFile(path.Join(sourceDir, f.Name()))
        if err != nil {
            return err
        }

        fmt.Printf("\n")

        allEnums = append(allEnums, fileEnums...)

        if pn != "" {
            pkgname = pn
        }
    }

    if pkgname == "" {
        return errors.New("no package name found in any file")
    }

    err = os.WriteFile(destFile, []byte(fmtOutput(newChecksum, allEnums, pkgname)), 0o755)
    if err != nil {
        return err
    }

    res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second))
    if err != nil {
        return err
    }

    if res.CommandTimedOut {
        fmt.Println(res.StdCombined)
        return errors.New("go fmt timed out")
    }
    if res.ExitCode != 0 {
        fmt.Println(res.StdCombined)
        return errors.New("go fmt did not succeed")
    }

    return nil
}

func processFile(fn string) ([]EnumDef, string, error) {
    file, err := os.Open(fn)
    if err != nil {
        return nil, "", err
    }

    defer func() { _ = file.Close() }()

    bin, err := io.ReadAll(file)
    if err != nil {
        return nil, "", err
    }

    lines := strings.Split(string(bin), "\n")

    enums := make([]EnumDef, 0)

    pkgname := ""

    for i, line := range lines {
        if i == 0 && strings.HasPrefix(line, "// Code generated by") {
            break
        }

        if match, ok := rexPackage.MatchFirst(line); i == 0 && ok {
            pkgname = match.GroupByName("name").Value()
            continue
        }

        if match, ok := rexEnumDef.MatchFirst(line); ok {
            def := EnumDef{
                File:         fn,
                EnumTypeName: match.GroupByName("name").Value(),
                Type:         match.GroupByName("type").Value(),
                Values:       make([]EnumDefVal, 0),
            }
            enums = append(enums, def)
            fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type)
        }

        if match, ok := rexValueDef.MatchFirst(line); ok {
            typename := match.GroupByName("type").Value()
            def := EnumDefVal{
                VarName:     match.GroupByName("name").Value(),
                Value:       match.GroupByName("value").Value(),
                Description: match.GroupByNameOrEmpty("descr").ValueOrNil(),
            }

            found := false
            for i, v := range enums {
                if v.EnumTypeName == typename {
                    enums[i].Values = append(enums[i].Values, def)
                    found = true
                    if def.Description != nil {
                        fmt.Printf("Found enum value [%s] for '%s' ('%s')\n", def.Value, def.VarName, *def.Description)
                    } else {
                        fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName)
                    }
                    break
                }
            }
            if !found {
                fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName)
            }
        }
    }

    return enums, pkgname, nil
}

func fmtOutput(cs string, enums []EnumDef, pkgname string) string {
    str := "// Code generated by enum-generate.go DO NOT EDIT.\n"
    str += "\n"
    str += "package " + pkgname + "\n"
    str += "\n"

    str += "import \"gogs.mikescher.com/BlackForestBytes/goext/langext\"" + "\n"
    str += "\n"

    str += "const ChecksumGenerator = \"" + cs + "\"" + "\n"
    str += "\n"

    str += "type Enum interface {" + "\n"
    str += "  Valid() bool" + "\n"
    str += "  ValuesAny() []any" + "\n"
    str += "  ValuesMeta() []EnumMetaValue" + "\n"
    str += "  VarName() string" + "\n"
    str += "}" + "\n"
    str += "" + "\n"

    str += "type StringEnum interface {" + "\n"
    str += "  Enum" + "\n"
    str += "  String() string" + "\n"
    str += "}" + "\n"
    str += "" + "\n"

    str += "type DescriptionEnum interface {" + "\n"
    str += "  Enum" + "\n"
    str += "  Description() string" + "\n"
    str += "}" + "\n"
    str += "\n"

    str += "type EnumMetaValue struct {" + "\n"
    str += "  VarName string `json:\"varName\"`" + "\n"
    str += "  Value any `json:\"value\"`" + "\n"
    str += "  Description *string `json:\"description\"`" + "\n"
    str += "}" + "\n"
    str += "\n"

    for _, enumdef := range enums {

        hasDescr := langext.ArrAll(enumdef.Values, func(val EnumDefVal) bool { return val.Description != nil })
        hasStr := enumdef.Type == "string"

        str += "// ================================ " + enumdef.EnumTypeName + " ================================" + "\n"
        str += "//" + "\n"
        str += "// File: " + enumdef.File + "\n"
        str += "// StringEnum: " + langext.Conditional(hasStr, "true", "false") + "\n"
        str += "// DescrEnum: " + langext.Conditional(hasDescr, "true", "false") + "\n"
        str += "//" + "\n"
        str += "" + "\n"

        str += "var __" + enumdef.EnumTypeName + "Values = []" + enumdef.EnumTypeName + "{" + "\n"
        for _, v := range enumdef.Values {
            str += "  " + v.VarName + "," + "\n"
        }
        str += "}" + "\n"
        str += "" + "\n"

        if hasDescr {
            str += "var __" + enumdef.EnumTypeName + "Descriptions = map[" + enumdef.EnumTypeName + "]string{" + "\n"
            for _, v := range enumdef.Values {
                str += "  " + v.VarName + ": \"" + strings.TrimSpace(*v.Description) + "\"," + "\n"
            }
            str += "}" + "\n"
            str += "" + "\n"
        }

        str += "var __" + enumdef.EnumTypeName + "Varnames = map[" + enumdef.EnumTypeName + "]string{" + "\n"
        for _, v := range enumdef.Values {
            str += "  " + v.VarName + ": \"" + v.VarName + "\"," + "\n"
        }
        str += "}" + "\n"
        str += "" + "\n"

        str += "func (e " + enumdef.EnumTypeName + ") Valid() bool {" + "\n"
        str += "  return langext.InArray(e, __" + enumdef.EnumTypeName + "Values)" + "\n"
        str += "}" + "\n"
        str += "" + "\n"

        str += "func (e " + enumdef.EnumTypeName + ") Values() []" + enumdef.EnumTypeName + " {" + "\n"
        str += "  return __" + enumdef.EnumTypeName + "Values" + "\n"
        str += "}" + "\n"
        str += "" + "\n"

        str += "func (e " + enumdef.EnumTypeName + ") ValuesAny() []any {" + "\n"
        str += "  return langext.ArrCastToAny(__" + enumdef.EnumTypeName + "Values)" + "\n"
        str += "}" + "\n"
        str += "" + "\n"

        str += "func (e " + enumdef.EnumTypeName + ") ValuesMeta() []EnumMetaValue {" + "\n"
        str += "  return []EnumMetaValue{" + "\n"
        for _, v := range enumdef.Values {
            if hasDescr {
                str += "    " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: langext.Ptr(\"%s\")},", v.VarName, v.VarName, strings.TrimSpace(*v.Description)) + "\n"
            } else {
                str += "    " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: nil},", v.VarName, v.VarName) + "\n"
            }
        }
        str += "  }" + "\n"
        str += "}" + "\n"
        str += "" + "\n"

        if hasStr {
            str += "func (e " + enumdef.EnumTypeName + ") String() string {" + "\n"
            str += "  return string(e)" + "\n"
            str += "}" + "\n"
            str += "" + "\n"
        }

        if hasDescr {
            str += "func (e " + enumdef.EnumTypeName + ") Description() string {" + "\n"
            str += "  if d, ok := __" + enumdef.EnumTypeName + "Descriptions[e]; ok {" + "\n"
            str += "    return d" + "\n"
            str += "  }" + "\n"
            str += "  return \"\"" + "\n"
            str += "}" + "\n"
            str += "" + "\n"
        }

        str += "func (e " + enumdef.EnumTypeName + ") VarName() string {" + "\n"
        str += "  if d, ok := __" + enumdef.EnumTypeName + "Varnames[e]; ok {" + "\n"
        str += "    return d" + "\n"
        str += "  }" + "\n"
        str += "  return \"\"" + "\n"
        str += "}" + "\n"
        str += "" + "\n"

        str += "func Parse" + enumdef.EnumTypeName + "(vv string) (" + enumdef.EnumTypeName + ", bool) {" + "\n"
        str += "  for _, ev := range __" + enumdef.EnumTypeName + "Values {" + "\n"
        str += "    if string(ev) == vv {" + "\n"
        str += "      return ev, true" + "\n"
        str += "    }" + "\n"
        str += "  }" + "\n"
        str += "  return \"\", false" + "\n"
        str += "}" + "\n"
        str += "" + "\n"

        str += "func " + enumdef.EnumTypeName + "Values() []" + enumdef.EnumTypeName + " {" + "\n"
        str += "  return __" + enumdef.EnumTypeName + "Values" + "\n"
        str += "}" + "\n"
        str += "" + "\n"

        str += "func " + enumdef.EnumTypeName + "ValuesMeta() []EnumMetaValue {" + "\n"
        str += "  return []EnumMetaValue{" + "\n"
        for _, v := range enumdef.Values {
            if hasDescr {
                str += "    " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: langext.Ptr(\"%s\")},", v.VarName, v.VarName, strings.TrimSpace(*v.Description)) + "\n"
            } else {
                str += "    " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: nil},", v.VarName, v.VarName) + "\n"
            }
        }
        str += "  }" + "\n"
        str += "}" + "\n"
        str += "" + "\n"

    }

    return str
}
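For orientation, a minimal source file that GenerateEnumSpecs would pick up — going by the rexEnumDef / rexValueDef patterns above and the //@enum:type marker used in cursortoken/direction.go further down — could look like this (package, type, and value names here are purely illustrative):

package models

// The generator matches the @enum:type marker and then collects const values of that type.

type JobState string //@enum:type

const (
    JobStateQueued  JobState = "QUEUED"  // waiting to be picked up
    JobStateRunning JobState = "RUNNING" // currently executing
    JobStateDone    JobState = "DONE"    // finished successfully
)

Running the generator over such a directory would emit the ChecksumGenerator constant plus Valid, ValuesMeta, VarName, ParseJobState, JobStateValues, and so on for JobState, exactly as assembled in fmtOutput above.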
bfcodegen/enum-generate_test.go (new file, 15 lines)
@@ -0,0 +1,15 @@

package bfcodegen

import (
    "testing"
)

func TestApplyEnvOverridesSimple(t *testing.T) {

    err := GenerateEnumSpecs("/home/mike/Code/reiff/badennet/bnet-backend/models", "/home/mike/Code/reiff/badennet/bnet-backend/models/enums_gen.go")
    if err != nil {
        t.Error(err)
        t.Fail()
    }

}
@@ -8,6 +8,7 @@ import (
 	"os"
 	"reflect"
 	"strconv"
+	"strings"
 	"time"
 )

@@ -172,6 +173,20 @@ func parseEnvToValue(envval string, fullEnvKey string, rvtype reflect.Type) (ref

 		return envcvl, nil

+	} else if rvtype.ConvertibleTo(reflect.TypeOf(false)) {
+
+		if strings.TrimSpace(strings.ToLower(envval)) == "true" {
+			return reflect.ValueOf(true).Convert(rvtype), nil
+		} else if strings.TrimSpace(strings.ToLower(envval)) == "false" {
+			return reflect.ValueOf(false).Convert(rvtype), nil
+		} else if strings.TrimSpace(strings.ToLower(envval)) == "1" {
+			return reflect.ValueOf(true).Convert(rvtype), nil
+		} else if strings.TrimSpace(strings.ToLower(envval)) == "0" {
+			return reflect.ValueOf(false).Convert(rvtype), nil
+		} else {
+			return reflect.Value{}, errors.New(fmt.Sprintf("Failed to parse env-config variable '%s' to <%s, bool> (value := '%s')", rvtype.Name(), fullEnvKey, envval))
+		}
+
 	} else if rvtype.ConvertibleTo(reflect.TypeOf("")) {

 		envcvl := reflect.ValueOf(envval).Convert(rvtype)
@@ -68,6 +68,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
 		V7 aliasstring   `env:"TEST_V7"`
 		V8 time.Duration `env:"TEST_V8"`
 		V9 time.Time     `env:"TEST_V9"`
+		VA bool          `env:"TEST_VA"`
 	}

 	data := testdata{
@@ -82,6 +83,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
 		V7: "7",
 		V8: 9,
 		V9: time.Unix(1671102873, 0),
+		VA: false,
 	}

 	t.Setenv("TEST_V1", "846")
@@ -93,6 +95,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
 	t.Setenv("TEST_V7", "AAAAAA")
 	t.Setenv("TEST_V8", "1min4s")
 	t.Setenv("TEST_V9", "2009-11-10T23:00:00Z")
+	t.Setenv("TEST_VA", "true")

 	err := ApplyEnvOverrides("", &data, ".")
 	if err != nil {
@@ -109,6 +112,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
 	tst.AssertEqual(t, data.V7, "AAAAAA")
 	tst.AssertEqual(t, data.V8, time.Second*64)
 	tst.AssertEqual(t, data.V9, time.Unix(1257894000, 0).UTC())
+	tst.AssertEqual(t, data.VA, true)
 }

 func TestApplyEnvOverridesRecursive(t *testing.T) {
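Taken together with the new bool branch above, the test shows the intended calling convention for confext; a condensed sketch (struct, field, and variable names are invented here, and the import path assumes the package layout listed in the README):

package main

import (
    "fmt"
    "os"

    "gogs.mikescher.com/BlackForestBytes/goext/confext"
)

type Config struct {
    Port    int  `env:"CFG_PORT"`
    Verbose bool `env:"CFG_VERBOSE"`
}

func main() {
    _ = os.Setenv("CFG_PORT", "846")
    _ = os.Setenv("CFG_VERBOSE", "true")

    cfg := Config{}
    // the "" and "." arguments mirror the call in TestApplyEnvOverridesSimple above
    if err := confext.ApplyEnvOverrides("", &cfg, "."); err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", cfg) // {Port:846 Verbose:true}
}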
cursortoken/direction.go (new file, 8 lines)
@@ -0,0 +1,8 @@

package cursortoken

type SortDirection string //@enum:type

const (
    SortASC  SortDirection = "ASC"
    SortDESC SortDirection = "DESC"
)
cursortoken/filter.go (new file, 10 lines)
@@ -0,0 +1,10 @@

package cursortoken

import (
    "go.mongodb.org/mongo-driver/mongo"
)

type Filter interface {
    FilterQuery() mongo.Pipeline
    Pagination() (string, SortDirection, string, SortDirection)
}
cursortoken/token.go (new file, 184 lines)
@@ -0,0 +1,184 @@

package cursortoken

import (
    "encoding/base32"
    "encoding/json"
    "errors"
    "go.mongodb.org/mongo-driver/bson/primitive"
    "strings"
    "time"
)

type Mode string

const (
    CTMStart  Mode = "START"
    CTMNormal Mode = "NORMAL"
    CTMEnd    Mode = "END"
)

type Extra struct {
    Timestamp *time.Time
    Id        *string
    Page      *int
    PageSize  *int
}

type CursorToken struct {
    Mode               Mode
    ValuePrimary       string
    ValueSecondary     string
    Direction          SortDirection
    DirectionSecondary SortDirection
    PageSize           int
    Extra              Extra
}

type cursorTokenSerialize struct {
    ValuePrimary       *string        `json:"v1,omitempty"`
    ValueSecondary     *string        `json:"v2,omitempty"`
    Direction          *SortDirection `json:"dir,omitempty"`
    DirectionSecondary *SortDirection `json:"dir2,omitempty"`
    PageSize           *int           `json:"size,omitempty"`

    ExtraTimestamp *time.Time `json:"ts,omitempty"`
    ExtraId        *string    `json:"id,omitempty"`
    ExtraPage      *int       `json:"pg,omitempty"`
    ExtraPageSize  *int       `json:"sz,omitempty"`
}

func Start() CursorToken {
    return CursorToken{
        Mode:               CTMStart,
        ValuePrimary:       "",
        ValueSecondary:     "",
        Direction:          "",
        DirectionSecondary: "",
        PageSize:           0,
        Extra:              Extra{},
    }
}

func End() CursorToken {
    return CursorToken{
        Mode:               CTMEnd,
        ValuePrimary:       "",
        ValueSecondary:     "",
        Direction:          "",
        DirectionSecondary: "",
        PageSize:           0,
        Extra:              Extra{},
    }
}

func (c *CursorToken) Token() string {
    if c.Mode == CTMStart {
        return "@start"
    }
    if c.Mode == CTMEnd {
        return "@end"
    }

    // We kinda manually implement omitempty for the CursorToken here
    // because omitempty does not work for time.Time and otherwise we would always
    // get weird time values when decoding a token that initially didn't have a Timestamp set
    // For this usecase we treat Unix=0 as an empty timestamp

    sertok := cursorTokenSerialize{}

    if c.ValuePrimary != "" {
        sertok.ValuePrimary = &c.ValuePrimary
    }
    if c.ValueSecondary != "" {
        sertok.ValueSecondary = &c.ValueSecondary
    }
    if c.Direction != "" {
        sertok.Direction = &c.Direction
    }
    if c.DirectionSecondary != "" {
        sertok.DirectionSecondary = &c.DirectionSecondary
    }
    if c.PageSize != 0 {
        sertok.PageSize = &c.PageSize
    }

    sertok.ExtraTimestamp = c.Extra.Timestamp
    sertok.ExtraId = c.Extra.Id
    sertok.ExtraPage = c.Extra.Page
    sertok.ExtraPageSize = c.Extra.PageSize

    body, err := json.Marshal(sertok)
    if err != nil {
        panic(err)
    }

    return "tok_" + base32.StdEncoding.EncodeToString(body)
}

func Decode(tok string) (CursorToken, error) {
    if tok == "" {
        return Start(), nil
    }
    if strings.ToLower(tok) == "@start" {
        return Start(), nil
    }
    if strings.ToLower(tok) == "@end" {
        return End(), nil
    }

    if !strings.HasPrefix(tok, "tok_") {
        return CursorToken{}, errors.New("could not decode token, missing prefix")
    }

    body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):])
    if err != nil {
        return CursorToken{}, err
    }

    var tokenDeserialize cursorTokenSerialize
    err = json.Unmarshal(body, &tokenDeserialize)
    if err != nil {
        return CursorToken{}, err
    }

    token := CursorToken{Mode: CTMNormal}

    if tokenDeserialize.ValuePrimary != nil {
        token.ValuePrimary = *tokenDeserialize.ValuePrimary
    }
    if tokenDeserialize.ValueSecondary != nil {
        token.ValueSecondary = *tokenDeserialize.ValueSecondary
    }
    if tokenDeserialize.Direction != nil {
        token.Direction = *tokenDeserialize.Direction
    }
    if tokenDeserialize.DirectionSecondary != nil {
        token.DirectionSecondary = *tokenDeserialize.DirectionSecondary
    }
    if tokenDeserialize.PageSize != nil {
        token.PageSize = *tokenDeserialize.PageSize
    }

    token.Extra.Timestamp = tokenDeserialize.ExtraTimestamp
    token.Extra.Id = tokenDeserialize.ExtraId
    token.Extra.Page = tokenDeserialize.ExtraPage
    token.Extra.PageSize = tokenDeserialize.ExtraPageSize

    return token, nil
}

func (c *CursorToken) ValuePrimaryObjectId() (primitive.ObjectID, bool) {
    if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil {
        return oid, true
    } else {
        return primitive.ObjectID{}, false
    }
}

func (c *CursorToken) ValueSecondaryObjectId() (primitive.ObjectID, bool) {
    if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil {
        return oid, true
    } else {
        return primitive.ObjectID{}, false
    }
}
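A quick round trip through the token API above (the ObjectID-style hex value is an arbitrary example):

package main

import (
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)

func main() {
    // the two special tokens serialize to fixed strings
    first := cursortoken.Start()
    fmt.Println(first.Token()) // @start

    // a normal token is JSON, base32-encoded, with a "tok_" prefix
    cur := cursortoken.CursorToken{
        Mode:         cursortoken.CTMNormal,
        ValuePrimary: "646fe0123456789abcdef012",
        Direction:    cursortoken.SortASC,
        PageSize:     50,
    }
    enc := cur.Token()

    dec, err := cursortoken.Decode(enc)
    if err != nil {
        panic(err)
    }
    fmt.Println(dec.ValuePrimary, dec.Direction, dec.PageSize) // 646fe0123456789abcdef012 ASC 50
}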
go.mod (20 changed lines)
@@ -3,12 +3,22 @@ module gogs.mikescher.com/BlackForestBytes/goext
 go 1.19

 require (
-	golang.org/x/sys v0.3.0
-	golang.org/x/term v0.3.0
+	github.com/jmoiron/sqlx v1.3.5
+	go.mongodb.org/mongo-driver v1.11.7
+	golang.org/x/crypto v0.9.0
+	golang.org/x/sys v0.8.0
+	golang.org/x/term v0.8.0
 )

 require (
-	github.com/jmoiron/sqlx v1.3.5 // indirect
-	go.mongodb.org/mongo-driver v1.11.1 // indirect
-	golang.org/x/crypto v0.4.0 // indirect
+	github.com/golang/snappy v0.0.4 // indirect
+	github.com/klauspost/compress v1.16.5 // indirect
+	github.com/montanaflynn/stats v0.7.1 // indirect
+	github.com/pkg/errors v0.9.1 // indirect
+	github.com/xdg-go/pbkdf2 v1.0.0 // indirect
+	github.com/xdg-go/scram v1.1.2 // indirect
+	github.com/xdg-go/stringprep v1.0.4 // indirect
+	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect
+	golang.org/x/sync v0.2.0 // indirect
+	golang.org/x/text v0.9.0 // indirect
 )
go.sum (73 changed lines)
@@ -1,50 +1,115 @@
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
 github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
+github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=
 github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
+github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
+github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
+github.com/google/go-cmp v0.5.2 h1:X2ev0eStA3AbceY54o37/0PQ/UWqKEiiO2dKL5OPaFM=
 github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
 github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
+github.com/klauspost/compress v1.13.6 h1:P76CopJELS0TiO2mebmnzgWaajssP/EszplttgQxcgc=
 github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
+github.com/klauspost/compress v1.16.5 h1:IFV2oUNUzZaz+XyusxpLzpzS8Pt5rh0Z16For/djlyI=
+github.com/klauspost/compress v1.16.5/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
+github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
 github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
 github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
+github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg=
 github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
+github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe h1:iruDEfMl2E6fbMZ9s0scYfZQ84/6SPL6zC8ACM2oIL0=
 github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
+github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
+github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
 github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
 github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
+github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
 github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
+github.com/xdg-go/scram v1.1.1 h1:VOMT+81stJgXW3CpHyqHN3AXDYIMsx56mEFrB37Mb/E=
 github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
+github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY=
+github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4=
+github.com/xdg-go/stringprep v1.0.3 h1:kdwGpVNwPFtjs98xCGkHjQtGKh86rDcRZN17QEMCOIs=
 github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=
+github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
+github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
+github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA=
 github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
+github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk=
+github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
 go.mongodb.org/mongo-driver v1.11.1 h1:QP0znIRTuL0jf1oBQoAoM0C6ZJfBK4kx0Uumtv1A7w8=
 go.mongodb.org/mongo-driver v1.11.1/go.mod h1:s7p5vEtfbeR1gYi6pnj3c3/urpbLv2T5Sfd6Rp2HBB8=
+go.mongodb.org/mongo-driver v1.11.7 h1:LIwYxASDLGUg/8wOhgOOZhX8tQa/9tgZPgzZoVqJvcs=
+go.mongodb.org/mongo-driver v1.11.7/go.mod h1:G9TgswdsWjX4tmDA5zfs2+6AEPpYJwqblyjsfuh8oXY=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
 golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
 golang.org/x/crypto v0.4.0 h1:UVQgzMY87xqpKNgb+kDsll2Igd33HszWHFLmpaRMq/8=
 golang.org/x/crypto v0.4.0/go.mod h1:3quD/ATkf6oY+rnes5c3ExXTbLc8mueNue5/DoinL80=
+golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g=
+golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
 golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.2.0 h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI=
+golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U=
-golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ=
 golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=
+golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
-golang.org/x/term v0.1.0 h1:g6Z6vPFA9dYBAF7DWcH6sCcOntplXsDKcliusYijMlw=
-golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
 golang.org/x/term v0.3.0 h1:qoo4akIqOcDME5bhc/NgxUdovd6BSS2uMsVjB56q1xI=
 golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
+golang.org/x/term v0.8.0 h1:n5xxQn2i3PC0yLAbjTpNT85q/Kgzcr2gIoX9OrJUols=
+golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
+golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM=
+golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE=
+golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
goextVersion.go (new file, 5 lines)
@@ -0,0 +1,5 @@

package goext

const GoextVersion = "0.0.148"

const GoextVersionTimestamp = "2023-06-07T17:22:38+0200"
@@ -4,6 +4,12 @@ import (
 	"reflect"
 )

+// PTrue := &true
+var PTrue = Ptr(true)
+
+// PFalse := &false
+var PFalse = Ptr(false)
+
 func Ptr[T any](v T) *T {
 	return &v
 }
@@ -1,10 +1,10 @@
-package langext
+package reflectext

 import (
 	"reflect"
 )

-var reflectBasicTypes = []reflect.Type{
+var reflectBasicTypes = map[reflect.Kind]reflect.Type{
 	reflect.Bool:  reflect.TypeOf(false),
 	reflect.Int:   reflect.TypeOf(int(0)),
 	reflect.Int8:  reflect.TypeOf(int8(0)),
@@ -109,3 +109,28 @@ func TryCast[T any](v any) (T, bool) {

 	return r4, true
 }
+
+func TryCastType(v any, dest reflect.Type) (any, bool) {
+
+	underlying := Underlying(reflect.TypeOf(v))
+
+	if underlying != Underlying(dest) {
+		return nil, false
+	}
+
+	r1 := reflect.ValueOf(v)
+
+	if !r1.CanConvert(underlying) {
+		return nil, false
+	}
+
+	r2 := r1.Convert(underlying)
+
+	if !r2.CanConvert(dest) {
+		return nil, false
+	}
+
+	r4 := r2.Convert(dest)
+
+	return r4.Interface(), true
+}
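TryCastType complements the existing TryCast by converting through the shared underlying basic type; a small illustration (MyID is a made-up alias, and the example assumes the renamed package is imported as reflectext):

package main

import (
    "fmt"
    "reflect"

    "gogs.mikescher.com/BlackForestBytes/goext/reflectext"
)

type MyID string

func main() {
    // string -> MyID succeeds: both resolve to the same underlying type (string)
    v, ok := reflectext.TryCastType("abc", reflect.TypeOf(MyID("")))
    fmt.Println(v, ok) // abc true

    // int -> MyID fails: the underlying types differ
    _, ok = reflectext.TryCastType(42, reflect.TypeOf(MyID("")))
    fmt.Println(ok) // false
}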
reflectext/primStrSer.go (new file, 136 lines)
@@ -0,0 +1,136 @@

package reflectext

import (
    "errors"
    "fmt"
    "go.mongodb.org/mongo-driver/bson/primitive"
    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "reflect"
    "strconv"
    "strings"
    "time"
)

var primitiveSerializer = map[reflect.Type]genSerializer{

    reflect.TypeOf(""): newGenSerializer(serStringToString, serStringToString),

    reflect.TypeOf(int(0)):   newGenSerializer(serIntNumToString[int], serStringToSIntNum[int]),
    reflect.TypeOf(int32(0)): newGenSerializer(serIntNumToString[int32], serStringToSIntNum[int32]),
    reflect.TypeOf(int64(0)): newGenSerializer(serIntNumToString[int64], serStringToSIntNum[int64]),

    reflect.TypeOf(uint(0)):   newGenSerializer(serIntNumToString[uint], serStringToUIntNum[uint]),
    reflect.TypeOf(uint32(0)): newGenSerializer(serIntNumToString[uint32], serStringToUIntNum[uint32]),
    reflect.TypeOf(uint64(0)): newGenSerializer(serIntNumToString[uint64], serStringToUIntNum[uint64]),

    reflect.TypeOf(float32(0)): newGenSerializer(serFloatNumToString[float32], serStringToFloatNum[float32]),
    reflect.TypeOf(float64(0)): newGenSerializer(serFloatNumToString[float64], serStringToFloatNum[float64]),

    reflect.TypeOf(true): newGenSerializer(serBoolToString, serStringToBool),

    reflect.TypeOf(primitive.ObjectID{}): newGenSerializer(serObjectIDToString, serStringToObjectID),

    reflect.TypeOf(time.Time{}): newGenSerializer(serTimeToString, serStringToTime),
}

type genSerializer struct {
    ToString   func(v any) (string, error)
    FromString func(v string) (any, error)
}

func newGenSerializer[TData any](tostr func(v TData) (string, error), fromstr func(v string) (TData, error)) genSerializer {
    return genSerializer{
        ToString: func(v any) (string, error) {
            if tdv, ok := v.(TData); ok {
                rv, err := tostr(tdv)
                if err != nil {
                    return "", err
                }
                return rv, nil
            } else {
                return "", errors.New(fmt.Sprintf("cannot convert type %T to TData (%T)", v, *new(TData)))
            }
        },
        FromString: func(v string) (any, error) {
            nv, err := fromstr(v)
            if err != nil {
                return "", err
            }
            return nv, nil
        },
    }
}

func serStringToString(v string) (string, error) {
    return v, nil
}

func serIntNumToString[TNum langext.IntegerConstraint](v TNum) (string, error) {
    return strconv.FormatInt(int64(v), 10), nil
}

func serStringToSIntNum[TNum langext.SignedConstraint](v string) (TNum, error) {
    r, err := strconv.ParseInt(v, 10, 64)
    if err != nil {
        return 0, err
    }
    return TNum(r), nil
}

func serStringToUIntNum[TNum langext.UnsignedConstraint](v string) (TNum, error) {
    r, err := strconv.ParseUint(v, 10, 64)
    if err != nil {
        return 0, err
    }
    return TNum(r), nil
}

func serFloatNumToString[TNum langext.FloatConstraint](v TNum) (string, error) {
    return strconv.FormatFloat(float64(v), 'f', -1, 64), nil
}

func serStringToFloatNum[TNum langext.FloatConstraint](v string) (TNum, error) {
    r, err := strconv.ParseFloat(v, 64)
    if err != nil {
        return 0, err
    }
    return TNum(r), nil
}

func serBoolToString(v bool) (string, error) {
    return langext.Conditional(v, "true", "false"), nil
}

func serStringToBool(v string) (bool, error) {
    if strings.ToLower(v) == "true" {
        return true, nil
    }
    if strings.ToLower(v) == "false" {
        return false, nil
    }
    return false, errors.New(fmt.Sprintf("invalid boolean value '%s'", v))
}

func serObjectIDToString(v primitive.ObjectID) (string, error) {
    return v.Hex(), nil
}

func serStringToObjectID(v string) (primitive.ObjectID, error) {
    if rv, err := primitive.ObjectIDFromHex(v); err == nil {
        return rv, nil
    } else {
        return primitive.ObjectID{}, err
    }
}

func serTimeToString(v time.Time) (string, error) {
    return v.Format(time.RFC3339Nano), nil
}

func serStringToTime(v string) (time.Time, error) {
    if rv, err := time.Parse(time.RFC3339Nano, v); err == nil {
        return rv, nil
    } else {
        return time.Time{}, err
    }
}
reflectext/primitiveStringSerializer.go (new file, 88 lines)
@@ -0,0 +1,88 @@
package reflectext

import (
    "errors"
    "fmt"
    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "reflect"
)

// PrimitiveStringSerializer is used to serialize primitive types (and a few more) from and to string.
// This is not really intended to be user facing, and more a simple building block for other mechanisms.
// supports:
//  - golang primitives (ints, uints, floats, bool, string)
//  - type aliases
//  - time.Time
//  - primitive.ObjectID
type PrimitiveStringSerializer struct{}

func (pss PrimitiveStringSerializer) ValueToString(v any) (string, error) {

    inType := reflect.TypeOf(v)

    if inType.Kind() == reflect.Ptr && langext.IsNil(v) {
        return "", nil
    }

    if inType.Kind() == reflect.Ptr {
        rval1 := reflect.ValueOf(v)
        rval2 := rval1.Elem()
        rval3 := rval2.Interface()
        return pss.ValueToString(rval3)
    }

    if conv, ok := primitiveSerializer[inType]; ok {
        return conv.ToString(v)
    }

    for convType, conv := range primitiveSerializer {
        if castV, ok := TryCastType(v, convType); ok {
            return conv.ToString(castV)
        }
    }

    return "", errors.New(fmt.Sprintf("failed to find a matching generic <toString> conversion for type %T", v))
}

func (pss PrimitiveStringSerializer) ValueFromString(str string, outType reflect.Type) (any, error) {

    if str == "" {
        return reflect.Zero(outType).Interface(), nil // = nil.(outType), nil
    }

    if outType.Kind() == reflect.Ptr {

        innerValue, err := pss.ValueFromString(str, outType.Elem())
        if err != nil {
            return nil, err
        }

        // converts innerValue to &innerValue (while keeping the concrete type)
        rval1 := reflect.ValueOf(innerValue)
        rval2 := rval1.Convert(outType.Elem())
        rval3 := reflect.New(outType.Elem())
        rval3.Elem().Set(rval2)
        rval4 := rval3.Interface()

        return rval4, nil
    }

    if conv, ok := primitiveSerializer[outType]; ok {
        return conv.FromString(str)
    }

    emptyResultVal := reflect.Zero(outType).Interface()

    for convType, conv := range primitiveSerializer {
        if _, ok := TryCastType(emptyResultVal, convType); ok {
            if convVal, err := conv.FromString(str); err == nil {
                if resVal, ok := TryCastType(convVal, outType); ok {
                    return resVal, nil
                }
            }
        }
    }

    return nil, errors.New(fmt.Sprintf("failed to find a matching generic <fromString> conversion for type %s", outType.String()))
}
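A minimal usage sketch for the serializer (illustrative only, not part of this commit; it assumes the reflect, langext and reflectext imports and relies only on the two methods above):

// Illustrative sketch: round-tripping a *int through the serializer.
func examplePrimitiveStringSerializer() error {
    pss := reflectext.PrimitiveStringSerializer{}

    // value -> string (pointers are dereferenced, nil pointers become "")
    s, err := pss.ValueToString(langext.Ptr(42))
    if err != nil {
        return err
    }
    // s == "42"

    // string -> value of a given reflect.Type (here: *int)
    v, err := pss.ValueFromString(s, reflect.TypeOf(langext.Ptr(0)))
    if err != nil {
        return err
    }
    _ = v.(*int) // points to 42
    return nil
}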
@@ -1,6 +1,7 @@
 package rfctime

 import (
+	"gogs.mikescher.com/BlackForestBytes/goext/tst"
 	"testing"
 	"time"
 )
@@ -81,7 +81,7 @@ func (t *RFC3339Time) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
 		return errors.New(fmt.Sprintf("cannot unmarshal %v into RFC3339Time", bt))
 	}
 	var tt time.Time
-	err := bson.Unmarshal(data, &tt)
+	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt)
 	if err != nil {
 		return err
 	}
@@ -116,6 +116,12 @@ func (t RFC3339Time) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueRead
 		return err
 	}
+
+	if val.Kind() == reflect.Ptr {
+		val.Set(reflect.ValueOf(&t))
+	} else {
+		val.Set(reflect.ValueOf(t))
+	}
 	return nil
 }
sq/converter.go (new file, 91 lines)
@@ -0,0 +1,91 @@
package sq

import (
    "errors"
    "fmt"
    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "time"
)

// TODO UNFINISHED
// This is not finished yet.
// The idea is that converters can be registered in the database struct,
// they get inherited by the transactions,
// and when unmarshaling (sq.Query | sq.QueryAll)
// or marshaling (sq.InsertSingle)
// the types get converted automatically...

type DBTypeConverter interface {
    ModelTypeString() string
    DBTypeString() string
    ModelToDB(v any) (any, error)
    DBToModel(v any) (any, error)
}

var ConverterBoolToBit = NewDBTypeConverter[bool, int](func(v bool) (int, error) {
    return langext.Conditional(v, 1, 0), nil
}, func(v int) (bool, error) {
    if v == 0 {
        return false, nil
    }
    if v == 1 {
        return true, nil
    }
    return false, errors.New(fmt.Sprintf("invalid value for boolean: '%d'", v))
})

var ConverterTimeToUnixMillis = NewDBTypeConverter[time.Time, int64](func(v time.Time) (int64, error) {
    return v.UnixMilli(), nil
}, func(v int64) (time.Time, error) {
    return time.UnixMilli(v), nil
})

var ConverterOptTimeToUnixMillis = NewDBTypeConverter[*time.Time, *int64](func(v *time.Time) (*int64, error) {
    if v == nil {
        return nil, nil
    }
    return langext.Ptr(v.UnixMilli()), nil
}, func(v *int64) (*time.Time, error) {
    if v == nil {
        return nil, nil
    }
    return langext.Ptr(time.UnixMilli(*v)), nil
})

type dbTypeConverterImpl[TModelData any, TDBData any] struct {
    dbTypeString    string
    modelTypeString string
    todb            func(v TModelData) (TDBData, error)
    tomodel         func(v TDBData) (TModelData, error)
}

func (t *dbTypeConverterImpl[TModelData, TDBData]) ModelTypeString() string {
    return t.modelTypeString
}

func (t *dbTypeConverterImpl[TModelData, TDBData]) DBTypeString() string {
    return t.dbTypeString
}

func (t *dbTypeConverterImpl[TModelData, TDBData]) ModelToDB(v any) (any, error) {
    if vv, ok := v.(TModelData); ok {
        return t.todb(vv)
    }
    return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.modelTypeString, v))
}

func (t *dbTypeConverterImpl[TModelData, TDBData]) DBToModel(v any) (any, error) {
    if vv, ok := v.(TDBData); ok {
        return t.tomodel(vv)
    }
    return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.dbTypeString, v))
}

func NewDBTypeConverter[TModelData any, TDBData any](todb func(v TModelData) (TDBData, error), tomodel func(v TDBData) (TModelData, error)) DBTypeConverter {
    return &dbTypeConverterImpl[TModelData, TDBData]{
        dbTypeString:    fmt.Sprintf("%T", *new(TDBData)),
        modelTypeString: fmt.Sprintf("%T", *new(TModelData)),
        todb:            todb,
        tomodel:         tomodel,
    }
}
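A short sketch of how such a converter behaves when called directly (illustrative only, not part of this commit; as the TODO above notes, the converters are not yet wired into Query/Insert):

// Illustrative sketch: using one of the predefined converters by hand.
func exampleDBTypeConverter() error {
    // model -> db: true becomes the int 1
    dbVal, err := sq.ConverterBoolToBit.ModelToDB(true)
    if err != nil {
        return err
    }

    // db -> model: 1 becomes true again; anything other than 0/1 is an error
    modelVal, err := sq.ConverterBoolToBit.DBToModel(dbVal)
    if err != nil {
        return err
    }
    _ = modelVal.(bool)
    return nil
}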
wmo/collection.go (new file, 67 lines)
@@ -0,0 +1,67 @@
package wmo

import (
    "context"
    "go.mongodb.org/mongo-driver/mongo"
    ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "reflect"
)

type EntityID = any

type fullTypeRef[TData any] struct {
    IsPointer      bool
    Kind           reflect.Kind
    RealType       reflect.Type
    Type           reflect.Type
    UnderlyingType reflect.Type
    Name           string
    Index          []int
}

type Coll[TData any] struct {
    coll        *mongo.Collection
    dataTypeMap map[string]fullTypeRef[TData]
}

func (c *Coll[TData]) Collection() *mongo.Collection {
    return c.coll
}

func (c *Coll[TData]) Name() string {
    return c.coll.Name()
}

func (c *Coll[TData]) Indexes() mongo.IndexView {
    return c.coll.Indexes()
}

func (c *Coll[TData]) Drop(ctx context.Context) error {
    return c.coll.Drop(ctx)
}

func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) {

    valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary)
    if err != nil {
        return ct.CursorToken{}, err
    }

    valueSecondary := ""
    if fieldSecondary != nil && dirSecondary != nil {
        valueSecondary, err = c.getFieldValueAsTokenString(lastEntity, *fieldSecondary)
        if err != nil {
            return ct.CursorToken{}, err
        }
    }

    return ct.CursorToken{
        Mode:           ct.CTMNormal,
        ValuePrimary:   valuePrimary,
        ValueSecondary: valueSecondary,
        Direction:      dirPrimary,
        PageSize:       langext.Coalesce(pageSize, 0),
        Extra:          ct.Extra{},
    }, nil
}
wmo/mongo.go (new file, 11 lines)
@@ -0,0 +1,11 @@
package wmo

import "go.mongodb.org/mongo-driver/mongo"

func W[TData any](collection *mongo.Collection) *Coll[TData] {
    c := Coll[TData]{coll: collection}

    c.init()

    return &c
}
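A usage sketch for the wrapper (illustrative only, not part of this commit; the Person struct, the database name and the collection name are made up):

// Illustrative sketch: wrapping a mongo.Collection into a typed Coll[Person].
type Person struct {
    ID   string `bson:"_id"`
    Name string `bson:"name"`
}

func examplePersons(ctx context.Context, client *mongo.Client) error {
    coll := wmo.W[Person](client.Database("testdb").Collection("persons"))

    created, err := coll.InsertOne(ctx, Person{ID: "p1", Name: "Mike"})
    if err != nil {
        return err
    }

    found, err := coll.FindOneByID(ctx, created.ID)
    if err != nil {
        return err
    }
    _ = found
    return nil
}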
wmo/pagination.go (new file, 87 lines)
@@ -0,0 +1,87 @@
package wmo

import (
    "errors"
    "go.mongodb.org/mongo-driver/bson"
    ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)

func CreatePagination[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) {

    cond := bson.A{}
    sort := bson.D{}

    valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary)
    if err != nil {
        return nil, err
    }

    if sortPrimary == ct.SortASC {
        // We sort ASC on <field> - so we want all entries newer ($gt) than the $primary
        cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}})
        sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
    } else if sortPrimary == ct.SortDESC {
        // We sort DESC on <field> - so we want all entries older ($lt) than the $primary
        cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}})
        sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
    }

    if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {

        valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary)
        if err != nil {
            return nil, err
        }

        if *sortSecondary == ct.SortASC {

            // the conflict-resolution condition: for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer)
            cond = append(cond, bson.M{"$and": bson.A{
                bson.M{fieldPrimary: valuePrimary},
                bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}},
            }})

            sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})

        } else if *sortSecondary == ct.SortDESC {

            // the conflict-resolution condition: for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older)
            cond = append(cond, bson.M{"$and": bson.A{
                bson.M{fieldPrimary: valuePrimary},
                bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}},
            }})

            sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})

        }
    }

    pipeline := make([]bson.D, 0, 3)

    if token.Mode == ct.CTMStart {

        // no gt/lt condition

    } else if token.Mode == ct.CTMNormal {

        pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}})

    } else if token.Mode == ct.CTMEnd {

        // false
        pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$eq": bson.A{"1", "0"}}}})

    } else {

        return nil, errors.New("unknown ct mode: " + string(token.Mode))

    }

    pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}})

    if pageSize != nil {
        pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}})
    }

    return pipeline, nil
}
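For orientation, a sketch of the pipeline that CreatePagination produces for a DESC primary sort on "cdate" with "_id" as secondary field and a CTMNormal token (illustrative only, not part of this commit; field names and values are placeholders):

// Illustrative sketch: approximate shape of the generated aggregation stages.
var (
    lastCDate any = "2009-11-10T23:00:00Z" // decoded token value of the primary field
    lastID    any = "000000000000"         // decoded token value of the secondary field
    pageSize      = 50
)

var examplePaginationPipeline = []bson.D{
    {{Key: "$match", Value: bson.M{"$or": bson.A{
        // strictly older than the last returned entry
        bson.M{"cdate": bson.M{"$lt": lastCDate}},
        // tie-breaker: same cdate, but smaller _id
        bson.M{"$and": bson.A{
            bson.M{"cdate": lastCDate},
            bson.M{"_id": bson.M{"$lt": lastID}},
        }},
    }}}},
    {{Key: "$sort", Value: bson.D{{Key: "cdate", Value: -1}}}},
    {{Key: "$limit", Value: int64(pageSize + 1)}},
}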
wmo/queryAggregate.go (new file, 22 lines)
@@ -0,0 +1,22 @@
package wmo

import (
    "context"
    "go.mongodb.org/mongo-driver/mongo"
    "go.mongodb.org/mongo-driver/mongo/options"
)

func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) {
    cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
    if err != nil {
        return nil, err
    }

    res := make([]TData, 0, cursor.RemainingBatchLength())
    err = cursor.All(ctx, &res)
    if err != nil {
        return nil, err
    }

    return res, nil
}
wmo/queryDelete.go (new file, 25 lines)
@@ -0,0 +1,25 @@
package wmo

import (
    "context"
    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"
)

func (c *Coll[TData]) DeleteOne(ctx context.Context, id EntityID) error {
    _, err := c.coll.DeleteOne(ctx, bson.M{"_id": id})
    if err != nil {
        return err
    }

    return nil
}

func (c *Coll[TData]) DeleteMany(ctx context.Context, filterQuery bson.M) (*mongo.DeleteResult, error) {
    res, err := c.coll.DeleteMany(ctx, filterQuery)
    if err != nil {
        return nil, err
    }

    return res, nil
}
wmo/queryFind.go (new file, 73 lines)
@@ -0,0 +1,73 @@
package wmo

import (
    "context"
    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"
    "go.mongodb.org/mongo-driver/mongo/options"
)

func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) {
    var res TData

    err := c.coll.FindOne(ctx, filter).Decode(&res)
    if err != nil {
        return *new(TData), err
    }

    return res, nil
}

func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) {
    var res TData

    err := c.coll.FindOne(ctx, filter).Decode(&res)
    if err == mongo.ErrNoDocuments {
        return nil, nil
    }
    if err != nil {
        return nil, err
    }

    return &res, nil
}

func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) {
    var res TData

    err := c.coll.FindOne(ctx, bson.M{"_id": id}).Decode(&res)
    if err != nil {
        return *new(TData), err
    }

    return res, nil
}

func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) {
    var res TData

    err := c.coll.FindOne(ctx, bson.M{"_id": id}).Decode(&res)
    if err == mongo.ErrNoDocuments {
        return nil, nil
    }
    if err != nil {
        return nil, err
    }

    return &res, nil
}

func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) {
    cursor, err := c.coll.Find(ctx, filter, opts...)
    if err != nil {
        return nil, err
    }

    res := make([]TData, 0, cursor.RemainingBatchLength())
    err = cursor.All(ctx, &res)
    if err != nil {
        return nil, err
    }

    return res, nil
}
wmo/queryInsert.go (new file, 22 lines)
@@ -0,0 +1,22 @@
package wmo

import (
    "context"
    "go.mongodb.org/mongo-driver/bson"
)

func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, error) {
    insRes, err := c.coll.InsertOne(ctx, valueIn)
    if err != nil {
        return *new(TData), err
    }

    var res TData

    err = c.coll.FindOne(ctx, bson.M{"_id": insRes.InsertedID}).Decode(&res)
    if err != nil {
        return *new(TData), err
    }

    return res, nil
}
wmo/queryList.go (new file, 85 lines)
@@ -0,0 +1,85 @@
package wmo

import (
    "context"
    "go.mongodb.org/mongo-driver/bson"
    ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)

func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) {
    if inTok.Mode == ct.CTMEnd {
        return make([]TData, 0), ct.End(), nil
    }

    pipeline := filter.FilterQuery()

    pf1, pd1, pf2, pd2 := filter.Pagination()

    sortPrimary := pf1
    sortDirPrimary := pd1
    sortSecondary := &pf2
    sortDirSecondary := &pd2

    if pf1 == pf2 {
        sortSecondary = nil
        sortDirSecondary = nil
    }

    paginationPipeline, err := CreatePagination(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
    if err != nil {
        return nil, ct.CursorToken{}, err
    }

    pipeline = append(pipeline, paginationPipeline...)

    cursor, err := c.coll.Aggregate(ctx, pipeline)
    if err != nil {
        return nil, ct.CursorToken{}, err
    }

    entities := make([]TData, 0, cursor.RemainingBatchLength()+1)
    for (pageSize == nil || len(entities) != *pageSize) && cursor.Next(ctx) {
        var entry TData
        err = cursor.Decode(&entry)
        if err != nil {
            return nil, ct.CursorToken{}, err
        }
        entities = append(entities, entry)
    }

    if pageSize == nil || len(entities) <= *pageSize || !cursor.TryNext(ctx) {
        return entities, ct.End(), nil
    }

    last := entities[len(entities)-1]

    nextToken, _ := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize)

    return entities, nextToken, nil
}

type countRes struct {
    Count int64 `bson:"c"`
}

func (c *Coll[TData]) Count(ctx context.Context, filter ct.Filter) (int64, error) {
    pipeline := filter.FilterQuery()

    pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}})

    cursor, err := c.coll.Aggregate(ctx, pipeline)
    if err != nil {
        return 0, err
    }

    if cursor.Next(ctx) {
        v := countRes{}
        err = cursor.Decode(&v)
        if err != nil {
            return 0, err
        }
        return v.Count, nil
    }

    return 0, nil
}
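A paging sketch built on List (illustrative only, not part of this commit). It assumes ct.Filter exposes FilterQuery() []bson.D and Pagination() (string, ct.SortDirection, string, ct.SortDirection) -- inferred from how List and Count call it above -- and that ct.End() yields a token with Mode == ct.CTMEnd; Person is the made-up struct from the earlier sketch:

// Illustrative sketch: draining a collection page by page via cursor tokens.
type allPersonsFilter struct{}

func (allPersonsFilter) FilterQuery() []bson.D { return []bson.D{} }
func (allPersonsFilter) Pagination() (string, ct.SortDirection, string, ct.SortDirection) {
    return "cdate", ct.SortDESC, "_id", ct.SortDESC
}

func listAllPersons(ctx context.Context, coll *wmo.Coll[Person]) ([]Person, error) {
    result := make([]Person, 0)
    pageSize := 100

    tok := ct.CursorToken{Mode: ct.CTMStart}
    for tok.Mode != ct.CTMEnd {
        page, nextTok, err := coll.List(ctx, allPersonsFilter{}, &pageSize, tok)
        if err != nil {
            return nil, err
        }
        result = append(result, page...)
        tok = nextTok
    }
    return result, nil
}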
wmo/queryUpdate.go (new file, 66 lines)
@@ -0,0 +1,66 @@
package wmo

import (
    "context"
    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"
    "go.mongodb.org/mongo-driver/mongo/options"
)

func (c *Coll[TData]) FindOneAndUpdate(ctx context.Context, filterQuery bson.M, updateQuery bson.M) (TData, error) {
    var res TData

    err := c.coll.FindOneAndUpdate(ctx, filterQuery, updateQuery, options.FindOneAndUpdate().SetReturnDocument(options.After)).Decode(&res)
    if err != nil {
        return *new(TData), err
    }

    return res, nil
}

func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQuery bson.M) error {
    _, err := c.coll.UpdateOne(ctx, filterQuery, updateQuery)
    if err != nil {
        return err
    }

    return nil
}

func (c *Coll[TData]) UpdateOneByID(ctx context.Context, id EntityID, updateQuery bson.M) error {
    _, err := c.coll.UpdateOne(ctx, bson.M{"_id": id}, updateQuery)
    if err != nil {
        return err
    }

    return nil
}

func (c *Coll[TData]) UpdateMany(ctx context.Context, filterQuery bson.M, updateQuery bson.M) (*mongo.UpdateResult, error) {
    res, err := c.coll.UpdateMany(ctx, filterQuery, updateQuery)
    if err != nil {
        return nil, err
    }

    return res, nil
}

func (c *Coll[TData]) ReplaceOne(ctx context.Context, id EntityID, value TData) error {
    // value is a full replacement document, so it must go through ReplaceOne (UpdateOne requires $-operators)
    _, err := c.coll.ReplaceOne(ctx, bson.M{"_id": id}, value)
    if err != nil {
        return err
    }

    return nil
}

func (c *Coll[TData]) FindOneAndReplace(ctx context.Context, id EntityID, value TData) (TData, error) {
    var res TData

    err := c.coll.FindOneAndReplace(ctx, bson.M{"_id": id}, value, options.FindOneAndReplace().SetReturnDocument(options.After)).Decode(&res)
    if err != nil {
        return *new(TData), err
    }

    return res, nil
}
wmo/reflection.go (new file, 106 lines)
@@ -0,0 +1,106 @@
package wmo

import (
    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "gogs.mikescher.com/BlackForestBytes/goext/reflectext"
    "reflect"
)

func (c *Coll[TData]) init() {

    c.dataTypeMap = make(map[string]fullTypeRef[TData])

    example := *new(TData)

    v := reflect.ValueOf(example)

    c.initFields("", v, make([]int, 0))
}

func (c *Coll[TData]) initFields(prefix string, rval reflect.Value, idxarr []int) {

    rtyp := rval.Type()

    for i := 0; i < rtyp.NumField(); i++ {

        rsfield := rtyp.Field(i)
        rvfield := rval.Field(i)

        if !rsfield.IsExported() {
            continue
        }

        bsonkey, found := rsfield.Tag.Lookup("bson")
        if !found || bsonkey == "-" {
            continue
        }

        fullKey := prefix + bsonkey

        newIdxArr := langext.ArrCopy(idxarr)
        newIdxArr = append(newIdxArr, i)

        if rvfield.Type().Kind() == reflect.Pointer {

            c.dataTypeMap[fullKey] = fullTypeRef[TData]{
                IsPointer:      true,
                RealType:       rvfield.Type(),
                Kind:           rvfield.Type().Elem().Kind(),
                Type:           rvfield.Type().Elem(),
                UnderlyingType: reflectext.Underlying(rvfield.Type().Elem()),
                Name:           rsfield.Name,
                Index:          newIdxArr,
            }

        } else {

            c.dataTypeMap[fullKey] = fullTypeRef[TData]{
                IsPointer:      false,
                RealType:       rvfield.Type(),
                Kind:           rvfield.Type().Kind(),
                Type:           rvfield.Type(),
                UnderlyingType: reflectext.Underlying(rvfield.Type()),
                Name:           rsfield.Name,
                Index:          newIdxArr,
            }

        }

        if rvfield.Kind() == reflect.Struct {
            c.initFields(fullKey+".", rvfield, newIdxArr)
        }

    }

}

func (c *Coll[TData]) getTokenValueAsMongoType(value string, fieldName string) (any, error) {

    fref := c.dataTypeMap[fieldName]

    pss := reflectext.PrimitiveStringSerializer{}

    return pss.ValueFromString(value, fref.RealType)
}

func (c *Coll[TData]) getFieldValueAsTokenString(entity TData, fieldName string) (string, error) {

    realValue := c.getFieldValue(entity, fieldName)

    pss := reflectext.PrimitiveStringSerializer{}

    return pss.ValueToString(realValue)
}

func (c *Coll[TData]) getFieldType(fieldName string) fullTypeRef[TData] {
    return c.dataTypeMap[fieldName]
}

func (c *Coll[TData]) getFieldValue(data TData, fieldName string) any {
    fref := c.dataTypeMap[fieldName]
    rval := reflect.ValueOf(data)
    return rval.FieldByIndex(fref.Index).Interface()
}
wmo/reflection_test.go (new file, 179 lines)
@@ -0,0 +1,179 @@
package wmo

import (
    "go.mongodb.org/mongo-driver/mongo"
    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "gogs.mikescher.com/BlackForestBytes/goext/rfctime"
    "gogs.mikescher.com/BlackForestBytes/goext/timeext"
    "gogs.mikescher.com/BlackForestBytes/goext/tst"
    "testing"
    "time"
)

func TestReflectionGetFieldType(t *testing.T) {

    type IDType string

    type TestData struct {
        ID    IDType    `bson:"_id"`
        CDate time.Time `bson:"cdate"`
        Sub   struct {
            A string `bson:"a"`
        } `bson:"sub"`
        Str   string                  `bson:"str"`
        Ptr   *int                    `bson:"ptr"`
        MDate rfctime.RFC3339NanoTime `bson:"mdate"`
    }

    coll := W[TestData](&mongo.Collection{})

    coll.init()

    t0 := time.Now()
    t1 := rfctime.NewRFC3339Nano(t0)

    d := TestData{
        ID:    "1",
        CDate: t0,
        Sub: struct {
            A string `bson:"a"`
        }{
            A: "2",
        },
        Str:   "3",
        Ptr:   langext.Ptr(4),
        MDate: t1,
    }

    tst.AssertEqual(t, coll.getFieldType("_id").Kind.String(), "string")
    tst.AssertEqual(t, coll.getFieldType("_id").Type.String(), "wmo.IDType")
    tst.AssertEqual(t, coll.getFieldType("_id").Name, "ID")
    tst.AssertEqual(t, coll.getFieldType("_id").IsPointer, false)
    tst.AssertEqual(t, coll.getFieldValue(d, "_id").(IDType), "1")

    tst.AssertEqual(t, coll.getFieldType("cdate").Kind.String(), "struct")
    tst.AssertEqual(t, coll.getFieldType("cdate").Type.String(), "time.Time")
    tst.AssertEqual(t, coll.getFieldType("cdate").Name, "CDate")
    tst.AssertEqual(t, coll.getFieldType("cdate").IsPointer, false)
    tst.AssertEqual(t, coll.getFieldValue(d, "cdate").(time.Time), t0)

    tst.AssertEqual(t, coll.getFieldType("sub.a").Kind.String(), "string")
    tst.AssertEqual(t, coll.getFieldType("sub.a").Type.String(), "string")
    tst.AssertEqual(t, coll.getFieldType("sub.a").Name, "A")
    tst.AssertEqual(t, coll.getFieldType("sub.a").IsPointer, false)
    tst.AssertEqual(t, coll.getFieldValue(d, "sub.a").(string), "2")

    tst.AssertEqual(t, coll.getFieldType("str").Kind.String(), "string")
    tst.AssertEqual(t, coll.getFieldType("str").Type.String(), "string")
    tst.AssertEqual(t, coll.getFieldType("str").Name, "Str")
    tst.AssertEqual(t, coll.getFieldType("str").IsPointer, false)
    tst.AssertEqual(t, coll.getFieldValue(d, "str").(string), "3")

    tst.AssertEqual(t, coll.getFieldType("ptr").Kind.String(), "int")
    tst.AssertEqual(t, coll.getFieldType("ptr").Type.String(), "int")
    tst.AssertEqual(t, coll.getFieldType("ptr").Name, "Ptr")
    tst.AssertEqual(t, coll.getFieldType("ptr").IsPointer, true)
    tst.AssertEqual(t, *coll.getFieldValue(d, "ptr").(*int), 4)
}

func TestReflectionGetTokenValueAsMongoType(t *testing.T) {

    type IDType string

    type TestData struct {
        ID    IDType    `bson:"_id"`
        CDate time.Time `bson:"cdate"`
        Sub   struct {
            A string `bson:"a"`
        } `bson:"sub"`
        Str   string                  `bson:"str"`
        Ptr   *int                    `bson:"ptr"`
        Num   int                     `bson:"num"`
        MDate rfctime.RFC3339NanoTime `bson:"mdate"`
    }

    coll := W[TestData](&mongo.Collection{})

    coll.init()

    gtvasmt := func(value string, fieldName string) any {
        v, err := coll.getTokenValueAsMongoType(value, fieldName)
        if err != nil {
            t.Errorf("%s", "failed to getTokenValueAsMongoType")
            t.Errorf("%+v", err)
        }
        return v
    }

    tx, err := time.Parse(time.RFC3339Nano, "2009-11-10T23:00:00Z")
    if err != nil {
        t.Errorf("%v", err)
    }

    tst.AssertEqual(t, gtvasmt("hello", "str").(string), "hello")
    tst.AssertEqual(t, gtvasmt("4", "num").(int), 4)
    tst.AssertEqual(t, gtvasmt("asdf", "_id").(IDType), "asdf")
    tst.AssertEqual(t, gtvasmt("", "ptr").(*int), nil)
    tst.AssertEqual(t, *(gtvasmt("123", "ptr").(*int)), 123)
    tst.AssertEqual(t, gtvasmt("2009-11-10T23:00:00Z", "cdate").(time.Time), tx)
    tst.AssertEqual(t, gtvasmt("2009-11-10T23:00:00Z", "mdate").(rfctime.RFC3339NanoTime), rfctime.NewRFC3339Nano(tx))
}

func TestReflectionGetFieldValueAsTokenString(t *testing.T) {

    type IDType string

    type TestData struct {
        ID    IDType    `bson:"_id"`
        CDate time.Time `bson:"cdate"`
        Sub   struct {
            A string `bson:"a"`
        } `bson:"sub"`
        Str   string                  `bson:"str"`
        Ptr   *int                    `bson:"ptr"`
        Num   int                     `bson:"num"`
        Ptr2  *int                    `bson:"ptr2"`
        FFF   float64                 `bson:"fff"`
        MDate rfctime.RFC3339NanoTime `bson:"mdate"`
    }

    coll := W[TestData](&mongo.Collection{})

    coll.init()

    t0 := time.Date(2000, 1, 1, 12, 0, 0, 0, timeext.TimezoneBerlin)
    t1 := rfctime.NewRFC3339Nano(t0)

    d := TestData{
        ID:    "1",
        CDate: t0,
        MDate: t1,
        Sub: struct {
            A string `bson:"a"`
        }{
            A: "2",
        },
        Str:  "3",
        Ptr:  langext.Ptr(4),
        Num:  22,
        FFF:  22.5,
        Ptr2: nil,
    }

    gfvats := func(value TestData, fieldName string) string {
        v, err := coll.getFieldValueAsTokenString(value, fieldName)
        if err != nil {
            t.Errorf("%s: %v", "failed to getTokenValueAsMongoType", err)
        }
        return v
    }

    tst.AssertEqual(t, gfvats(d, "str"), "3")
    tst.AssertEqual(t, gfvats(d, "num"), "22")
    tst.AssertEqual(t, gfvats(d, "_id"), "1")
    tst.AssertEqual(t, gfvats(d, "ptr"), "4")
    tst.AssertEqual(t, gfvats(d, "ptr2"), "")
    tst.AssertEqual(t, gfvats(d, "fff"), "22.5")
    tst.AssertEqual(t, gfvats(d, "cdate"), t0.Format(time.RFC3339Nano))
    tst.AssertEqual(t, gfvats(d, "mdate"), t0.Format(time.RFC3339Nano))
}