Compare commits

...

69 Commits

Author SHA1 Message Date
a127b24e62 v0.0.269 add AssertSetDeepEqual
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m8s
2023-09-25 09:18:22 +02:00
69d6290376 add AssertSetDeepEqual 2023-09-25 09:18:07 +02:00
c08a739158 v0.0.268 added WeekStart() and WeekEnd()
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 50s
2023-09-21 16:29:23 +02:00
5f5f0e44f0 v0.0.267 fix AssertDeepEqual
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 50s
2023-09-21 14:15:02 +02:00
6e6797eac5 fix AssertDeepEqual 2023-09-21 14:14:51 +02:00
cd9406900a v0.0.266 fix tst showing wrong file:line
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m22s
2023-09-21 13:08:13 +02:00
6c81f7f6bc fix tst showing wrong file:line, add DeepEqual 2023-09-21 13:07:55 +02:00
d56a0235af v0.0.265 add ListWithCount
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 49s
2023-09-18 12:57:27 +02:00
de2ca763c1 add function for ListWithCount 2023-09-18 12:56:56 +02:00
da52bb5c90 v0.0.264 added Valid() to id-gen
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 49s
2023-09-18 11:46:17 +02:00
3d4afe7b25 v0.0.263 re-add checksum guard to id-generate
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 50s
2023-09-18 10:43:29 +02:00
f5766d639c v0.0.262 ignore _gen files in bfcodegen checksum-calc
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 46s
2023-09-18 10:42:43 +02:00
cdf2a6e76b v0.0.261 added id-generate.go
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m7s
2023-09-18 10:38:25 +02:00
6d7cfb86f8 v0.0.260 wmo: fix endless recursion in wmo reflection
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 50s
2023-09-12 11:40:39 +02:00
1e9d663ffe fix endless recursion in wmo reflection 2023-09-12 11:39:51 +02:00
5b8d7ebf87 v0.0.259 wmo: allow fields to pointers to structs
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 49s
2023-09-12 10:48:57 +02:00
11dc6d2640 use type instead of value for Reflection in Coll.initFields 2023-09-12 10:47:41 +02:00
29a3f73f15 v0.0.258
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m22s
2023-09-11 11:28:34 +02:00
98105642fc removed default sort 2023-09-11 11:28:26 +02:00
0fd5f3b417 v0.0.257 better handling if pagination is faulty in wmo list
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m2s
2023-09-05 15:01:55 +02:00
43cac4b3bb v0.0.256 bind header
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m4s
2023-08-28 10:44:38 +02:00
cd68af8e66 v0.0.255 tuples
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 2m43s
2023-08-24 09:47:32 +02:00
113d838876 v0.0.254 revert back to 0.0.250
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m14s
2023-08-22 10:49:57 +02:00
9e5bc0d3ea v0.0.253
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m23s
2023-08-22 10:36:35 +02:00
6d3bd13f61 v0.0.252
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m5s
2023-08-22 10:23:04 +02:00
b5ca475b3f v0.0.251 exerr.WithStackSkip
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m9s
2023-08-22 10:21:13 +02:00
a75b1291cb v0.0.250
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 51s
2023-08-21 15:34:27 +02:00
21cd1ee066 v0.0.249 better MDTAny json serialization
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 54s
2023-08-21 15:19:40 +02:00
ae43cbb623 v0.0.248 exerr in wmo package
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 55s
2023-08-21 15:08:35 +02:00
9b752a911c v0.0.247 -.-
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m12s
2023-08-21 14:23:44 +02:00
ec9ac26a4c v0.0.246 timeext
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m11s
2023-08-21 14:15:06 +02:00
39a0b73d56 v0.0.245
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 40s
2023-08-21 13:27:36 +02:00
2e2e15d4d2 v0.0.244
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 48s
2023-08-18 13:27:02 +02:00
0d16946aba v0.0.243
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m18s
2023-08-18 13:25:18 +02:00
14441c2378 Adde gitea workflow: tests
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 57s
2023-08-14 18:39:22 +02:00
f6bcdc9903 Merge remote-tracking branch 'origin/master' 2023-08-14 16:33:03 +02:00
a95053211c Fix tests 2023-08-14 16:32:39 +02:00
813ce71e3e v0.0.242 forgot to return something 2023-08-14 16:05:12 +02:00
56ae0cfc6c v0.0.241 join string array 2023-08-14 15:54:50 +02:00
202afc9068 v0.0.240 2023-08-14 15:36:12 +02:00
56094b3cb6 v0.0.239 pctx.WithTImeout 2023-08-11 16:32:34 +02:00
0da098e9f9 v0.0.238 2023-08-09 19:51:41 +02:00
f0881c9fd6 v0.0.237 parse application/x-www-form-urlencoded in ginext 2023-08-09 19:35:01 +02:00
029b408749 v0.0.236 cmdext.FailOnStdErr 2023-08-09 17:48:06 +02:00
84b2be3169 v0.0.235 added .Enum(..) to exerr 2023-08-09 14:40:16 +02:00
c872cecc67 v0.0.234 2023-08-09 10:39:14 +02:00
99cd92729e v0.0.233 IncludeMetaInGinOutput 2023-08-09 10:37:59 +02:00
ac416f7b69 v0.0.232 2023-08-08 18:01:00 +02:00
e10140e143 v0.0.231 2023-08-08 16:10:31 +02:00
e165f0f62f v0.0.230 2023-08-08 16:09:02 +02:00
655d4daad9 v0.0.229 2023-08-08 16:05:44 +02:00
87a004e577 v0.0.228 bf 2023-08-08 15:33:52 +02:00
376c6cab50 v0.0.227 error on duplicate exerr.ErrorType 2023-08-08 15:28:29 +02:00
4a3f25baa0 v0.0.226 2023-08-08 14:28:09 +02:00
aa33bc8df3 v0.0.225 2023-08-08 13:09:15 +02:00
96b3718375 v0.0.224 implement error.As(x) for exerr 2023-08-08 12:38:22 +02:00
5f9b55933b v0.0.223 2023-08-08 11:52:40 +02:00
74d42637e7 v0.0.222 forgot status code 2023-08-06 19:11:59 +02:00
0c05bcf29b v0.0.221 download file data 2023-08-06 19:10:31 +02:00
9136143f2f v0.0.220 add ginext.bufferBody 2023-08-03 09:09:27 +02:00
2f1b784dc2 v0.0.219 implement error.Is(*) for exerr 2023-07-28 15:42:12 +02:00
190584e0e6 v0.0.218 bf 2023-07-27 17:16:30 +02:00
b7003b9ec9 v0.0.217 2023-07-27 17:12:41 +02:00
4f871271e8 v0.0.216 2023-07-27 17:00:53 +02:00
91f4793678 v0.0.215 Add (ee *ExErr) ToAPIJson 2023-07-27 14:37:11 +02:00
3b30bb049e v0.0.214 reassign innerctx 2023-07-27 09:58:10 +02:00
f0c5b36ea9 v0.0.213 inject gin key value pairs into context 2023-07-27 09:46:06 +02:00
647ec64c3b v0.0.212 2023-07-26 10:44:26 +02:00
b5f9b6b638 v0.0.211 2023-07-26 10:40:42 +02:00
61 changed files with 1694 additions and 308 deletions


@@ -0,0 +1,9 @@
FROM golang:latest
RUN apt install -y make curl python3 && go install gotest.tools/gotestsum@latest
COPY . /source
WORKDIR /source
CMD ["make", "test"]


@@ -0,0 +1,30 @@
# https://docs.gitea.com/next/usage/actions/quickstart
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions
# https://docs.github.com/en/actions/learn-github-actions/contexts#github-context
name: Build Docker and Deploy
run-name: Build & Deploy ${{ gitea.ref }} on ${{ gitea.actor }}
on: [push]
jobs:
  run_tests:
    name: Run goext test-suite
    runs-on: bfb-cicd-latest
    steps:
      - name: Check out code
        uses: actions/checkout@v3
      - name: Build test docker
        id: build_docker
        run: echo "DOCKER_IMG_ID=$(docker build -q . -f .gitea/workflows/Dockerfile_tests || echo __err_build__)" >> $GITHUB_OUTPUT
      - name: Run tests
        run: docker run --rm "${{ steps.build_docker.outputs.DOCKER_IMG_ID }}"
      - name: Cleanup
        if: always()
        run: docker image rm "${{ steps.build_docker.outputs.DOCKER_IMG_ID }}"


@@ -5,7 +5,7 @@ run:
 test:
 	# go test ./...
 	which gotestsum || go install gotest.tools/gotestsum@latest
-	gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./test"
+	gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./..."
 
 version:
 	_data/version.sh

TODO.md (12 lines changed)

@@ -2,12 +2,6 @@
 - cronext
-- cursortoken
-
-- typed/geenric mongo wrapper
-- error package
-
-- rfctime.DateOnly
-- rfctime.HMSTimeOnly
-- rfctime.NanoTimeOnly
+- rfctime.DateOnly
+- rfctime.HMSTimeOnly
+- rfctime.NanoTimeOnly

bfcodegen/_test_example.tgz (new binary file, not shown)


@@ -31,13 +31,13 @@ type EnumDef struct {
 	Values []EnumDefVal
 }
 
-var rexPackage = rext.W(regexp.MustCompile("^package\\s+(?P<name>[A-Za-z0-9_]+)\\s*$"))
+var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`))
 
-var rexEnumDef = rext.W(regexp.MustCompile("^\\s*type\\s+(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*//\\s*(@enum:type).*$"))
+var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`))
 
-var rexValueDef = rext.W(regexp.MustCompile("^\\s*(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*=\\s*(?P<value>(\"[A-Za-z0-9_:]+\"|[0-9]+))\\s*(//(?P<descr>.*))?.*$"))
+var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:]+"|[0-9]+))\s*(//(?P<descr>.*))?.*$`))
 
-var rexChecksumConst = rext.W(regexp.MustCompile("const ChecksumGenerator = \"(?P<cs>[A-Za-z0-9_]*)\""))
+var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))
 
 func GenerateEnumSpecs(sourceDir string, destFile string) error {
@@ -52,13 +52,14 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
 		if err != nil {
 			return err
 		}
-		if m, ok := rexChecksumConst.MatchFirst(string(content)); ok {
+		if m, ok := rexEnumChecksumConst.MatchFirst(string(content)); ok {
 			oldChecksum = m.GroupByName("cs").Value()
 		}
 	}
 
 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
+	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") })
 	langext.SortBy(files, func(v os.DirEntry) string { return v.Name() })
 
 	newChecksumStr := goext.GoextVersion
@@ -85,7 +86,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
 	for _, f := range files {
 		fmt.Printf("========= %s =========\n\n", f.Name())
 
-		fileEnums, pn, err := processFile(sourceDir, path.Join(sourceDir, f.Name()))
+		fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name()))
 		if err != nil {
 			return err
 		}
@@ -103,7 +104,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
 		return errors.New("no package name found in any file")
 	}
 
-	err = os.WriteFile(destFile, []byte(fmtOutput(newChecksum, allEnums, pkgname)), 0o755)
+	err = os.WriteFile(destFile, []byte(fmtEnumOutput(newChecksum, allEnums, pkgname)), 0o755)
 	if err != nil {
 		return err
 	}
@@ -125,7 +126,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
 	return nil
 }
 
-func processFile(basedir string, fn string) ([]EnumDef, string, error) {
+func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) {
 	file, err := os.Open(fn)
 	if err != nil {
 		return nil, "", err
@@ -149,7 +150,7 @@ func processFile(basedir string, fn string) ([]EnumDef, string, error) {
 			break
 		}
 
-		if match, ok := rexPackage.MatchFirst(line); i == 0 && ok {
+		if match, ok := rexEnumPackage.MatchFirst(line); i == 0 && ok {
 			pkgname = match.GroupByName("name").Value()
 			continue
 		}
@@ -172,7 +173,7 @@ func processFile(basedir string, fn string) ([]EnumDef, string, error) {
 			fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type)
 		}
 
-		if match, ok := rexValueDef.MatchFirst(line); ok {
+		if match, ok := rexEnumValueDef.MatchFirst(line); ok {
 			typename := match.GroupByName("type").Value()
 
 			def := EnumDefVal{
 				VarName: match.GroupByName("name").Value(),
@@ -202,43 +203,17 @@ func processFile(basedir string, fn string) ([]EnumDef, string, error) {
 	return enums, pkgname, nil
 }
 
-func fmtOutput(cs string, enums []EnumDef, pkgname string) string {
+func fmtEnumOutput(cs string, enums []EnumDef, pkgname string) string {
 	str := "// Code generated by enum-generate.go DO NOT EDIT.\n"
 	str += "\n"
 	str += "package " + pkgname + "\n"
 	str += "\n"
 	str += "import \"gogs.mikescher.com/BlackForestBytes/goext/langext\"" + "\n"
+	str += "import \"gogs.mikescher.com/BlackForestBytes/goext/enums\"" + "\n"
 	str += "\n"
-	str += "const ChecksumGenerator = \"" + cs + "\"" + "\n"
-	str += "\n"
-	str += "type Enum interface {" + "\n"
-	str += " Valid() bool" + "\n"
-	str += " ValuesAny() []any" + "\n"
-	str += " ValuesMeta() []EnumMetaValue" + "\n"
-	str += " VarName() string" + "\n"
-	str += "}" + "\n"
-	str += "" + "\n"
-	str += "type StringEnum interface {" + "\n"
-	str += " Enum" + "\n"
-	str += " String() string" + "\n"
-	str += "}" + "\n"
-	str += "" + "\n"
-	str += "type DescriptionEnum interface {" + "\n"
-	str += " Enum" + "\n"
-	str += " Description() string" + "\n"
-	str += "}" + "\n"
-	str += "\n"
-	str += "type EnumMetaValue struct {" + "\n"
-	str += " VarName string `json:\"varName\"`" + "\n"
-	str += " Value any `json:\"value\"`" + "\n"
-	str += " Description *string `json:\"description\"`" + "\n"
-	str += "}" + "\n"
+	str += "const ChecksumEnumGenerator = \"" + cs + "\" // GoExtVersion: " + goext.GoextVersion + "\n"
 	str += "\n"
 
 	for _, enumdef := range enums {
@@ -292,7 +267,7 @@ func fmtOutput(cs string, enums []EnumDef, pkgname string) string {
 		str += "}" + "\n"
 		str += "" + "\n"
-		str += "func (e " + enumdef.EnumTypeName + ") ValuesMeta() []EnumMetaValue {" + "\n"
+		str += "func (e " + enumdef.EnumTypeName + ") ValuesMeta() []enums.EnumMetaValue {" + "\n"
 		str += " return " + enumdef.EnumTypeName + "ValuesMeta()"
 		str += "}" + "\n"
 		str += "" + "\n"
@@ -322,11 +297,11 @@ func fmtOutput(cs string, enums []EnumDef, pkgname string) string {
 		str += "}" + "\n"
 		str += "" + "\n"
-		str += "func (e " + enumdef.EnumTypeName + ") Meta() EnumMetaValue {" + "\n"
+		str += "func (e " + enumdef.EnumTypeName + ") Meta() enums.EnumMetaValue {" + "\n"
 		if hasDescr {
-			str += " return EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())}"
+			str += " return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())}"
 		} else {
-			str += " return EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil}"
+			str += " return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil}"
 		}
 		str += "}" + "\n"
 		str += "" + "\n"
@@ -346,8 +321,8 @@ func fmtOutput(cs string, enums []EnumDef, pkgname string) string {
 	str += "}" + "\n"
 	str += "" + "\n"
-	str += "func " + enumdef.EnumTypeName + "ValuesMeta() []EnumMetaValue {" + "\n"
+	str += "func " + enumdef.EnumTypeName + "ValuesMeta() []enums.EnumMetaValue {" + "\n"
-	str += " return []EnumMetaValue{" + "\n"
+	str += " return []enums.EnumMetaValue{" + "\n"
 	for _, v := range enumdef.Values {
 		str += " " + v.VarName + ".Meta(),\n"
 	}
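
For orientation, a minimal sketch of the kind of source this generator scans for. The package, type and value names below are invented; only the "// @enum:type" marker and the Name Type = "value" // description layout come from the rexEnumDef / rexEnumValueDef patterns above:

	package models

	type JobState string // @enum:type

	const (
		JobStateRunning JobState = "RUNNING" // job is currently executing
		JobStateFailed  JobState = "FAILED"  // job finished with an error
		JobStateDone    JobState = "DONE"
	)

Running GenerateEnumSpecs over such a package would then emit an enums_gen.go providing Valid(), ValuesAny(), ValuesMeta(), VarName() and Meta() for JobState, guarded by the ChecksumEnumGenerator constant.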


@@ -1,15 +1,42 @@
 package bfcodegen
 
 import (
+	_ "embed"
+	"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
+	"gogs.mikescher.com/BlackForestBytes/goext/langext"
+	"gogs.mikescher.com/BlackForestBytes/goext/tst"
+	"os"
+	"path/filepath"
 	"testing"
+	"time"
 )
 
-func TestApplyEnvOverridesSimple(t *testing.T) {
-	err := GenerateEnumSpecs("/home/mike/Code/reiff/badennet/bnet-backend/models", "/home/mike/Code/reiff/badennet/bnet-backend/models/enums_gen.go")
-	if err != nil {
-		t.Error(err)
-		t.Fail()
-	}
+//go:embed _test_example.tgz
+var ExampleModels []byte
+
+func TestGenerateEnumSpecs(t *testing.T) {
+	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz")
+	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())
+
+	err := os.WriteFile(tmpFile, ExampleModels, 0o777)
+	tst.AssertNoErr(t, err)
+	t.Cleanup(func() { _ = os.Remove(tmpFile) })
+
+	err = os.Mkdir(tmpDir, 0o777)
+	tst.AssertNoErr(t, err)
+	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) })
+
+	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
+	tst.AssertNoErr(t, err)
+
+	err = GenerateEnumSpecs(tmpDir, tmpDir+"/enums_gen.go")
+	tst.AssertNoErr(t, err)
+
+	err = GenerateEnumSpecs(tmpDir, tmpDir+"/enums_gen.go")
+	tst.AssertNoErr(t, err)
 }

bfcodegen/id-generate.go (new file, 236 lines)

@@ -0,0 +1,236 @@
package bfcodegen
import (
"errors"
"fmt"
"gogs.mikescher.com/BlackForestBytes/goext"
"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"io"
"os"
"path"
"path/filepath"
"regexp"
"strings"
"time"
)
type IDDef struct {
File string
FileRelative string
Name string
}
var rexIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`))
var rexIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@id:type).*$`))
var rexIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))
func GenerateIDSpecs(sourceDir string, destFile string) error {
files, err := os.ReadDir(sourceDir)
if err != nil {
return err
}
oldChecksum := "N/A"
if _, err := os.Stat(destFile); !os.IsNotExist(err) {
content, err := os.ReadFile(destFile)
if err != nil {
return err
}
if m, ok := rexIDChecksumConst.MatchFirst(string(content)); ok {
oldChecksum = m.GroupByName("cs").Value()
}
}
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") })
langext.SortBy(files, func(v os.DirEntry) string { return v.Name() })
newChecksumStr := goext.GoextVersion
for _, f := range files {
content, err := os.ReadFile(path.Join(sourceDir, f.Name()))
if err != nil {
return err
}
newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content)
}
newChecksum := cryptext.BytesSha256([]byte(newChecksumStr))
if newChecksum != oldChecksum {
fmt.Printf("[IDGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum)
} else {
fmt.Printf("[IDGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum)
return nil
}
allIDs := make([]IDDef, 0)
pkgname := ""
for _, f := range files {
fmt.Printf("========= %s =========\n\n", f.Name())
fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name()))
if err != nil {
return err
}
fmt.Printf("\n")
allIDs = append(allIDs, fileIDs...)
if pn != "" {
pkgname = pn
}
}
if pkgname == "" {
return errors.New("no package name found in any file")
}
err = os.WriteFile(destFile, []byte(fmtIDOutput(newChecksum, allIDs, pkgname)), 0o755)
if err != nil {
return err
}
res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second))
if err != nil {
return err
}
if res.CommandTimedOut {
fmt.Println(res.StdCombined)
return errors.New("go fmt timed out")
}
if res.ExitCode != 0 {
fmt.Println(res.StdCombined)
return errors.New("go fmt did not succeed")
}
return nil
}
func processIDFile(basedir string, fn string) ([]IDDef, string, error) {
file, err := os.Open(fn)
if err != nil {
return nil, "", err
}
defer func() { _ = file.Close() }()
bin, err := io.ReadAll(file)
if err != nil {
return nil, "", err
}
lines := strings.Split(string(bin), "\n")
ids := make([]IDDef, 0)
pkgname := ""
for i, line := range lines {
if i == 0 && strings.HasPrefix(line, "// Code generated by") {
break
}
if match, ok := rexIDPackage.MatchFirst(line); i == 0 && ok {
pkgname = match.GroupByName("name").Value()
continue
}
if match, ok := rexIDDef.MatchFirst(line); ok {
rfp, err := filepath.Rel(basedir, fn)
if err != nil {
return nil, "", err
}
def := IDDef{
File: fn,
FileRelative: rfp,
Name: match.GroupByName("name").Value(),
}
fmt.Printf("Found ID definition { '%s' }\n", def.Name)
ids = append(ids, def)
}
}
return ids, pkgname, nil
}
func fmtIDOutput(cs string, ids []IDDef, pkgname string) string {
str := "// Code generated by id-generate.go DO NOT EDIT.\n"
str += "\n"
str += "package " + pkgname + "\n"
str += "\n"
str += "import \"go.mongodb.org/mongo-driver/bson\"" + "\n"
str += "import \"go.mongodb.org/mongo-driver/bson/bsontype\"" + "\n"
str += "import \"go.mongodb.org/mongo-driver/bson/primitive\"" + "\n"
str += "import \"gogs.mikescher.com/BlackForestBytes/goext/exerr\"" + "\n"
str += "\n"
str += "const ChecksumIDGenerator = \"" + cs + "\" // GoExtVersion: " + goext.GoextVersion + "\n"
str += "\n"
anyDef := langext.ArrFirstOrNil(ids, func(def IDDef) bool { return def.Name == "AnyID" || def.Name == "AnyId" })
for _, iddef := range ids {
str += "// ================================ " + iddef.Name + " (" + iddef.FileRelative + ") ================================" + "\n"
str += "" + "\n"
str += "func (i " + iddef.Name + ") MarshalBSONValue() (bsontype.Type, []byte, error) {" + "\n"
str += " if objId, err := primitive.ObjectIDFromHex(string(i)); err == nil {" + "\n"
str += " return bson.MarshalValue(objId)" + "\n"
str += " } else {" + "\n"
str += " return 0, nil, exerr.New(exerr.TypeMarshalEntityID, \"Failed to marshal " + iddef.Name + "(\"+i.String()+\") to ObjectId\").Str(\"value\", string(i)).Type(\"type\", i).Build()" + "\n"
str += " }" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func (i " + iddef.Name + ") String() string {" + "\n"
str += " return string(i)" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func (i " + iddef.Name + ") ObjID() (primitive.ObjectID, error) {" + "\n"
str += " return primitive.ObjectIDFromHex(string(i))" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func (i " + iddef.Name + ") Valid() bool {" + "\n"
str += " _, err := primitive.ObjectIDFromHex(string(i))" + "\n"
str += " return err == nil" + "\n"
str += "}" + "\n"
str += "" + "\n"
if anyDef != nil {
str += "func (i " + iddef.Name + ") AsAny() " + anyDef.Name + " {" + "\n"
str += " return " + anyDef.Name + "(i)" + "\n"
str += "}" + "\n"
str += "" + "\n"
}
str += "func New" + iddef.Name + "() " + iddef.Name + " {" + "\n"
str += " return " + iddef.Name + "(primitive.NewObjectID().Hex())" + "\n"
str += "}" + "\n"
str += "" + "\n"
}
return str
}
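
For context, a hedged sketch of the input this ID generator looks for. The type names are invented; only the string base type and the "// @id:type" marker are what rexIDDef matches:

	package models

	type UserID string // @id:type

	type AnyID string // @id:type

For every such type the generated file then provides NewUserID(), UserID.Valid(), UserID.ObjID(), UserID.String(), BSON marshalling via MarshalBSONValue(), and, because a type named AnyID exists, UserID.AsAny().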


@@ -14,6 +14,7 @@ type CommandRunner struct {
 	listener         []CommandListener
 	enforceExitCodes *[]int
 	enforceNoTimeout bool
+	enforceNoStderr  bool
 }
 
 func Runner(program string) *CommandRunner {
@@ -25,6 +26,7 @@ func Runner(program string) *CommandRunner {
 		listener:         make([]CommandListener, 0),
 		enforceExitCodes: nil,
 		enforceNoTimeout: false,
+		enforceNoStderr:  false,
 	}
 }
 
@@ -73,6 +75,11 @@ func (r *CommandRunner) FailOnTimeout() *CommandRunner {
 	return r
 }
 
+func (r *CommandRunner) FailOnStderr() *CommandRunner {
+	r.enforceNoStderr = true
+	return r
+}
+
 func (r *CommandRunner) Listen(lstr CommandListener) *CommandRunner {
 	r.listener = append(r.listener, lstr)
 	return r
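
A hedged usage sketch of the extended runner builder (the program and the script are placeholders; Runner, Arg, Timeout, FailOnTimeout, Run and the new FailOnStderr are the methods visible in this diff and in the tests further down):

	res, err := cmdext.Runner("python3").
		Arg("-c").
		Arg("import sys; print(\"boom\", file=sys.stderr)").
		Timeout(5 * time.Second).
		FailOnTimeout().
		FailOnStderr().
		Run()
	if err != nil {
		// with FailOnStderr() any output on stderr aborts the command and is reported as ErrStderrPrint
		fmt.Println("command failed:", err, res.StdErr)
	}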


@@ -11,6 +11,7 @@ import (
var ErrExitCode = errors.New("process exited with an unexpected exitcode") var ErrExitCode = errors.New("process exited with an unexpected exitcode")
var ErrTimeout = errors.New("process did not exit after the specified timeout") var ErrTimeout = errors.New("process did not exit after the specified timeout")
var ErrStderrPrint = errors.New("process did print to stderr stream")
type CommandResult struct { type CommandResult struct {
StdOut string StdOut string
@@ -53,12 +54,27 @@ func run(opt CommandRunner) (CommandResult, error) {
err error err error
} }
stderrFailChan := make(chan bool)
outputChan := make(chan resultObj) outputChan := make(chan resultObj)
go func() { go func() {
// we need to first fully read the pipes and then call Wait // we need to first fully read the pipes and then call Wait
// see https://pkg.go.dev/os/exec#Cmd.StdoutPipe // see https://pkg.go.dev/os/exec#Cmd.StdoutPipe
stdout, stderr, stdcombined, err := preader.Read(opt.listener) listener := make([]CommandListener, 0)
listener = append(listener, opt.listener...)
if opt.enforceNoStderr {
listener = append(listener, genericCommandListener{
_readRawStderr: langext.Ptr(func(v []byte) {
if len(v) > 0 {
stderrFailChan <- true
}
}),
})
}
stdout, stderr, stdcombined, err := preader.Read(listener)
if err != nil { if err != nil {
outputChan <- resultObj{stdout, stderr, stdcombined, err} outputChan <- resultObj{stdout, stderr, stdcombined, err}
_ = cmd.Process.Kill() _ = cmd.Process.Kill()
@@ -115,6 +131,34 @@ func run(opt CommandRunner) (CommandResult, error) {
return res, nil return res, nil
} }
case <-stderrFailChan:
_ = cmd.Process.Kill()
for _, lstr := range opt.listener {
lstr.Timeout()
}
if fallback, ok := syncext.ReadChannelWithTimeout(outputChan, 32*time.Millisecond); ok {
// most of the time the cmd.Process.Kill() should also have finished the pipereader
// and we can at least return the already collected stdout, stderr, etc
res := CommandResult{
StdOut: fallback.stdout,
StdErr: fallback.stderr,
StdCombined: fallback.stdcombined,
ExitCode: -1,
CommandTimedOut: false,
}
return res, ErrStderrPrint
} else {
res := CommandResult{
StdOut: "",
StdErr: "",
StdCombined: "",
ExitCode: -1,
CommandTimedOut: false,
}
return res, ErrStderrPrint
}
case outobj := <-outputChan: case outobj := <-outputChan:
if exiterr, ok := outobj.err.(*exec.ExitError); ok { if exiterr, ok := outobj.err.(*exec.ExitError); ok {
excode := exiterr.ExitCode() excode := exiterr.ExitCode()


@@ -1,6 +1,7 @@
package cmdext package cmdext
import ( import (
"errors"
"fmt" "fmt"
"testing" "testing"
"time" "time"
@@ -32,7 +33,7 @@ func TestStdout(t *testing.T) {
func TestStderr(t *testing.T) { func TestStderr(t *testing.T) {
res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").Run() res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").Run()
if err != nil { if err != nil {
t.Errorf("%v", err) t.Errorf("%v", err)
} }
@@ -55,7 +56,7 @@ func TestStderr(t *testing.T) {
} }
func TestStdcombined(t *testing.T) { func TestStdcombined(t *testing.T) {
res1, err := Runner("python"). res1, err := Runner("python3").
Arg("-c"). Arg("-c").
Arg("import sys; import time; print(\"1\", file=sys.stderr, flush=True); time.sleep(0.1); print(\"2\", file=sys.stdout, flush=True); time.sleep(0.1); print(\"3\", file=sys.stderr, flush=True)"). Arg("import sys; import time; print(\"1\", file=sys.stderr, flush=True); time.sleep(0.1); print(\"2\", file=sys.stdout, flush=True); time.sleep(0.1); print(\"3\", file=sys.stderr, flush=True)").
Run() Run()
@@ -81,7 +82,7 @@ func TestStdcombined(t *testing.T) {
} }
func TestPartialRead(t *testing.T) { func TestPartialRead(t *testing.T) {
res1, err := Runner("python"). res1, err := Runner("python3").
Arg("-c"). Arg("-c").
Arg("import sys; import time; print(\"first message\", flush=True); time.sleep(5); print(\"cant see me\", flush=True);"). Arg("import sys; import time; print(\"first message\", flush=True); time.sleep(5); print(\"cant see me\", flush=True);").
Timeout(100 * time.Millisecond). Timeout(100 * time.Millisecond).
@@ -105,7 +106,7 @@ func TestPartialRead(t *testing.T) {
} }
func TestPartialReadStderr(t *testing.T) { func TestPartialReadStderr(t *testing.T) {
res1, err := Runner("python"). res1, err := Runner("python3").
Arg("-c"). Arg("-c").
Arg("import sys; import time; print(\"first message\", file=sys.stderr, flush=True); time.sleep(5); print(\"cant see me\", file=sys.stderr, flush=True);"). Arg("import sys; import time; print(\"first message\", file=sys.stderr, flush=True); time.sleep(5); print(\"cant see me\", file=sys.stderr, flush=True);").
Timeout(100 * time.Millisecond). Timeout(100 * time.Millisecond).
@@ -130,7 +131,7 @@ func TestPartialReadStderr(t *testing.T) {
func TestReadUnflushedStdout(t *testing.T) { func TestReadUnflushedStdout(t *testing.T) {
res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stdout, end='')").Run() res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stdout, end='')").Run()
if err != nil { if err != nil {
t.Errorf("%v", err) t.Errorf("%v", err)
} }
@@ -154,7 +155,7 @@ func TestReadUnflushedStdout(t *testing.T) {
func TestReadUnflushedStderr(t *testing.T) { func TestReadUnflushedStderr(t *testing.T) {
res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stderr, end='')").Run() res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stderr, end='')").Run()
if err != nil { if err != nil {
t.Errorf("%v", err) t.Errorf("%v", err)
} }
@@ -179,7 +180,7 @@ func TestReadUnflushedStderr(t *testing.T) {
func TestPartialReadUnflushed(t *testing.T) { func TestPartialReadUnflushed(t *testing.T) {
t.SkipNow() t.SkipNow()
res1, err := Runner("python"). res1, err := Runner("python3").
Arg("-c"). Arg("-c").
Arg("import sys; import time; print(\"first message\", end=''); time.sleep(5); print(\"cant see me\", end='');"). Arg("import sys; import time; print(\"first message\", end=''); time.sleep(5); print(\"cant see me\", end='');").
Timeout(100 * time.Millisecond). Timeout(100 * time.Millisecond).
@@ -205,7 +206,7 @@ func TestPartialReadUnflushed(t *testing.T) {
func TestPartialReadUnflushedStderr(t *testing.T) { func TestPartialReadUnflushedStderr(t *testing.T) {
t.SkipNow() t.SkipNow()
res1, err := Runner("python"). res1, err := Runner("python3").
Arg("-c"). Arg("-c").
Arg("import sys; import time; print(\"first message\", file=sys.stderr, end=''); time.sleep(5); print(\"cant see me\", file=sys.stderr, end='');"). Arg("import sys; import time; print(\"first message\", file=sys.stderr, end=''); time.sleep(5); print(\"cant see me\", file=sys.stderr, end='');").
Timeout(100 * time.Millisecond). Timeout(100 * time.Millisecond).
@@ -230,7 +231,7 @@ func TestPartialReadUnflushedStderr(t *testing.T) {
func TestListener(t *testing.T) { func TestListener(t *testing.T) {
res1, err := Runner("python"). res1, err := Runner("python3").
Arg("-c"). Arg("-c").
Arg("import sys;" + Arg("import sys;" +
"import time;" + "import time;" +
@@ -263,7 +264,7 @@ func TestListener(t *testing.T) {
func TestLongStdout(t *testing.T) { func TestLongStdout(t *testing.T) {
res1, err := Runner("python"). res1, err := Runner("python3").
Arg("-c"). Arg("-c").
Arg("import sys; import time; print(\"X\" * 125001 + \"\\n\"); print(\"Y\" * 125001 + \"\\n\"); print(\"Z\" * 125001 + \"\\n\");"). Arg("import sys; import time; print(\"X\" * 125001 + \"\\n\"); print(\"Y\" * 125001 + \"\\n\"); print(\"Z\" * 125001 + \"\\n\");").
Timeout(5000 * time.Millisecond). Timeout(5000 * time.Millisecond).
@@ -289,16 +290,40 @@ func TestLongStdout(t *testing.T) {
func TestFailOnTimeout(t *testing.T) { func TestFailOnTimeout(t *testing.T) {
_, err := Runner("sleep").Arg("2").Timeout(200 * time.Millisecond).FailOnTimeout().Run() _, err := Runner("sleep").Arg("2").Timeout(200 * time.Millisecond).FailOnTimeout().Run()
if err != ErrTimeout { if !errors.Is(err, ErrTimeout) {
t.Errorf("wrong err := %v", err) t.Errorf("wrong err := %v", err)
} }
} }
func TestFailOnStderr(t *testing.T) {
res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").FailOnStderr().Run()
if err == nil {
t.Errorf("no err")
}
if res1.CommandTimedOut {
t.Errorf("Timeout")
}
if res1.ExitCode != -1 {
t.Errorf("res1.ExitCode == %v", res1.ExitCode)
}
if res1.StdErr != "error" {
t.Errorf("res1.StdErr == '%v'", res1.StdErr)
}
if res1.StdOut != "" {
t.Errorf("res1.StdOut == '%v'", res1.StdOut)
}
if res1.StdCombined != "error\n" {
t.Errorf("res1.StdCombined == '%v'", res1.StdCombined)
}
}
func TestFailOnExitcode(t *testing.T) { func TestFailOnExitcode(t *testing.T) {
_, err := Runner("false").Timeout(200 * time.Millisecond).FailOnExitCode().Run() _, err := Runner("false").Timeout(200 * time.Millisecond).FailOnExitCode().Run()
if err != ErrExitCode { if !errors.Is(err, ErrExitCode) {
t.Errorf("wrong err := %v", err) t.Errorf("wrong err := %v", err)
} }


@@ -7,6 +7,9 @@ type SyncSet[TData comparable] struct {
 	lock sync.Mutex
 }
 
+// Add adds `value` to the set
+// returns true if the value was actually inserted
+// returns false if the value already existed
 func (s *SyncSet[TData]) Add(value TData) bool {
 	s.lock.Lock()
 	defer s.lock.Unlock()
 
@@ -15,10 +18,10 @@ func (s *SyncSet[TData]) Add(value TData) bool {
 		s.data = make(map[TData]bool)
 	}
 
-	_, ok := s.data[value]
+	_, existsInPreState := s.data[value]
 	s.data[value] = true
 
-	return !ok
+	return !existsInPreState
 }
 
 func (s *SyncSet[TData]) AddAll(values []TData) {
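
A small sketch of the resulting behaviour (the keys are arbitrary; the zero-value set is used the same way by registeredTypes in errortypes.go further down):

	var seen dataext.SyncSet[string]

	fmt.Println(seen.Add("a")) // true  - "a" was actually inserted
	fmt.Println(seen.Add("a")) // false - "a" already existed
	seen.AddAll([]string{"b", "c"})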

dataext/tuple.go (new file, 170 lines)

@@ -0,0 +1,170 @@
package dataext
type ValueGroup interface {
TupleLength() int
TupleValues() []any
}
// ----------------------------------------------------------------------------
type Single[T1 any] struct {
V1 T1
}
func (s Single[T1]) TupleLength() int {
return 1
}
func (s Single[T1]) TupleValues() []any {
return []any{s.V1}
}
// ----------------------------------------------------------------------------
type Tuple[T1 any, T2 any] struct {
V1 T1
V2 T2
}
func (t Tuple[T1, T2]) TupleLength() int {
return 2
}
func (t Tuple[T1, T2]) TupleValues() []any {
return []any{t.V1, t.V2}
}
// ----------------------------------------------------------------------------
type Triple[T1 any, T2 any, T3 any] struct {
V1 T1
V2 T2
V3 T3
}
func (t Triple[T1, T2, T3]) TupleLength() int {
return 3
}
func (t Triple[T1, T2, T3]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3}
}
// ----------------------------------------------------------------------------
type Quadruple[T1 any, T2 any, T3 any, T4 any] struct {
V1 T1
V2 T2
V3 T3
V4 T4
}
func (t Quadruple[T1, T2, T3, T4]) TupleLength() int {
return 4
}
func (t Quadruple[T1, T2, T3, T4]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4}
}
// ----------------------------------------------------------------------------
type Quintuple[T1 any, T2 any, T3 any, T4 any, T5 any] struct {
V1 T1
V2 T2
V3 T3
V4 T4
V5 T5
}
func (t Quintuple[T1, T2, T3, T4, T5]) TupleLength() int {
return 5
}
func (t Quintuple[T1, T2, T3, T4, T5]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5}
}
// ----------------------------------------------------------------------------
type Sextuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any] struct {
V1 T1
V2 T2
V3 T3
V4 T4
V5 T5
V6 T6
}
func (t Sextuple[T1, T2, T3, T4, T5, T6]) TupleLength() int {
return 6
}
func (t Sextuple[T1, T2, T3, T4, T5, T6]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6}
}
// ----------------------------------------------------------------------------
type Septuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any] struct {
V1 T1
V2 T2
V3 T3
V4 T4
V5 T5
V6 T6
V7 T7
}
func (t Septuple[T1, T2, T3, T4, T5, T6, T7]) TupleLength() int {
return 7
}
func (t Septuple[T1, T2, T3, T4, T5, T6, T7]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7}
}
// ----------------------------------------------------------------------------
type Octuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any] struct {
V1 T1
V2 T2
V3 T3
V4 T4
V5 T5
V6 T6
V7 T7
V8 T8
}
func (t Octuple[T1, T2, T3, T4, T5, T6, T7, T8]) TupleLength() int {
return 8
}
func (t Octuple[T1, T2, T3, T4, T5, T6, T7, T8]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8}
}
// ----------------------------------------------------------------------------
type Nonuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any] struct {
V1 T1
V2 T2
V3 T3
V4 T4
V5 T5
V6 T6
V7 T7
V8 T8
V9 T9
}
func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleLength() int {
return 9
}
func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8, t.V9}
}
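
A brief usage sketch (the values are illustrative; the types and methods are exactly the ones defined above):

	pair := dataext.Tuple[string, int]{V1: "answer", V2: 42}
	fmt.Println(pair.TupleLength(), pair.TupleValues()) // 2 [answer 42]

	var vg dataext.ValueGroup = dataext.Triple[int, int, int]{V1: 1, V2: 2, V3: 3}
	fmt.Println(vg.TupleValues()) // [1 2 3]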

enums/enum.go (new file, 24 lines)

@@ -0,0 +1,24 @@
package enums
type Enum interface {
Valid() bool
ValuesAny() []any
ValuesMeta() []EnumMetaValue
VarName() string
}
type StringEnum interface {
Enum
String() string
}
type DescriptionEnum interface {
Enum
Description() string
}
type EnumMetaValue struct {
VarName string `json:"varName"`
Value any `json:"value"`
Description *string `json:"description"`
}
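
A hedged sketch of how these interfaces are meant to be consumed generically (the helper function is invented, not part of the package):

	func describeEnum(e enums.Enum) []string {
		out := make([]string, 0, len(e.ValuesMeta()))
		for _, mv := range e.ValuesMeta() {
			out = append(out, fmt.Sprintf("%s = %v", mv.VarName, mv.Value))
		}
		return out
	}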


@@ -9,6 +9,7 @@ import (
 	"github.com/rs/zerolog"
 	"go.mongodb.org/mongo-driver/bson/primitive"
 	"gogs.mikescher.com/BlackForestBytes/goext/dataext"
+	"gogs.mikescher.com/BlackForestBytes/goext/enums"
 	"gogs.mikescher.com/BlackForestBytes/goext/langext"
 	"net/http"
 	"os"
@@ -80,6 +81,10 @@ func New(t ErrorType, msg string) *Builder {
 }
 
 func Wrap(err error, msg string) *Builder {
+	if err == nil {
+		return &Builder{errorData: newExErr(CatSystem, TypeInternal, msg)} // prevent NPE if we call Wrap with err==nil
+	}
+
 	if !pkgconfig.RecursiveErrors {
 		v := FromError(err)
 		v.Message = msg
@@ -269,6 +274,18 @@ func (b *Builder) Any(key string, val any) *Builder {
 	return b.addMeta(key, MDTAny, newAnyWrap(val))
 }
 
+func (b *Builder) Stringer(key string, val fmt.Stringer) *Builder {
+	if val == nil {
+		return b.addMeta(key, MDTString, "(!nil)")
+	} else {
+		return b.addMeta(key, MDTString, val.String())
+	}
+}
+
+func (b *Builder) Enum(key string, val enums.Enum) *Builder {
+	return b.addMeta(key, MDTEnum, newEnumWrap(val))
+}
+
 func (b *Builder) Stack() *Builder {
 	return b.addMeta("@Stack", MDTString, string(debug.Stack()))
 }
@@ -289,7 +306,7 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request)
 	}
 	b.Str("gin.method", req.Method)
 	b.Str("gin.path", g.FullPath())
-	b.Str("gin.header", formatHeader(g.Request.Header))
+	b.Strs("gin.header", extractHeader(g.Request.Header))
 	if req.URL != nil {
 		b.Str("gin.url", req.URL.String())
 	}
@@ -305,22 +322,38 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request)
 	if ctxVal := g.GetString("reqid"); ctxVal != "" {
 		b.Str("gin.context.reqid", ctxVal)
 	}
-	if req.Method != "GET" && req.Body != nil && req.Header.Get("Content-Type") == "application/json" {
-		if brc, ok := req.Body.(dataext.BufferedReadCloser); ok {
-			if bin, err := brc.BufferedAll(); err == nil {
-				if len(bin) < 16*1024 {
-					var prettyJSON bytes.Buffer
-					err = json.Indent(&prettyJSON, bin, "", " ")
-					if err == nil {
-						b.Str("gin.body", string(prettyJSON.Bytes()))
-					} else {
-						b.Bytes("gin.body", bin)
-					}
-				} else {
-					b.Str("gin.body", fmt.Sprintf("[[%v bytes]]", len(bin)))
-				}
-			}
-		}
-	}
+	if req.Method != "GET" && req.Body != nil {
+		if req.Header.Get("Content-Type") == "application/json" {
+			if brc, ok := req.Body.(dataext.BufferedReadCloser); ok {
+				if bin, err := brc.BufferedAll(); err == nil {
+					if len(bin) < 16*1024 {
+						var prettyJSON bytes.Buffer
+						err = json.Indent(&prettyJSON, bin, "", " ")
+						if err == nil {
+							b.Str("gin.body", string(prettyJSON.Bytes()))
+						} else {
+							b.Bytes("gin.body", bin)
+						}
+					} else {
+						b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
+					}
+				}
+			}
+		}
+		if req.Header.Get("Content-Type") == "multipart/form-data" || req.Header.Get("Content-Type") == "x-www-form-urlencoded" {
+			if brc, ok := req.Body.(dataext.BufferedReadCloser); ok {
+				if bin, err := brc.BufferedAll(); err == nil {
+					if len(bin) < 16*1024 {
+						b.Bytes("gin.body", bin)
+					} else {
+						b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
+					}
+				}
+			}
+		}
+	}
 
 	b.containsGinData = true
@@ -350,6 +383,20 @@ func formatHeader(header map[string][]string) string {
 	return r
 }
 
+func extractHeader(header map[string][]string) []string {
+	r := make([]string, 0, len(header))
+	for k, v := range header {
+		for _, hval := range v {
+			value := hval
+			value = strings.ReplaceAll(value, "\n", "\\n")
+			value = strings.ReplaceAll(value, "\r", "\\r")
+			value = strings.ReplaceAll(value, "\t", "\\t")
+			r = append(r, k+": "+value)
+		}
+	}
+	return r
+}
+
 // ----------------------------------------------------------------------------
 
 // Build creates a new error, ready to pass up the stack
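
To make the new builder methods concrete, a hedged sketch of how they chain inside some function that returns an error (the error type, keys and the state value are invented; New, Str, Stringer, Enum and Build are the functions shown in this file):

	return exerr.New(exerr.TypeInternal, "failed to sync job").
		Str("jobId", "job-1").
		Stringer("startedAt", time.Now()). // any fmt.Stringer
		Enum("state", state).              // any value implementing enums.Enum
		Build()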


@@ -27,6 +27,7 @@ func FromError(err error) *ExErr {
 		StatusCode:     nil,
 		Message:        err.Error(),
 		WrappedErrType: fmt.Sprintf("%T", err),
+		WrappedErr:     err,
 		Caller:         "",
 		OriginalError:  nil,
 		Meta:           getForeignMeta(err),
@@ -43,6 +44,7 @@ func newExErr(cat ErrorCategory, errtype ErrorType, msg string) *ExErr {
 		StatusCode:     nil,
 		Message:        msg,
 		WrappedErrType: "",
+		WrappedErr:     nil,
 		Caller:         callername(2),
 		OriginalError:  nil,
 		Meta:           make(map[string]MetaValue),
@@ -59,6 +61,7 @@ func wrapExErr(e *ExErr, msg string, cat ErrorCategory, stacktraceskip int) *ExE
 		StatusCode:     e.StatusCode,
 		Message:        msg,
 		WrappedErrType: "",
+		WrappedErr:     nil,
 		Caller:         callername(1 + stacktraceskip),
 		OriginalError:  e,
 		Meta:           make(map[string]MetaValue),


@@ -1,6 +1,7 @@
 package exerr
 
 import (
+	"gogs.mikescher.com/BlackForestBytes/goext/dataext"
 	"gogs.mikescher.com/BlackForestBytes/goext/langext"
 )
 
@@ -37,25 +38,39 @@ type ErrorType struct {
 //goland:noinspection GoUnusedGlobalVariable
 var (
-	TypeInternal       = ErrorType{"INTERNAL_ERROR", langext.Ptr(500)}
-	TypePanic          = ErrorType{"PANIC", langext.Ptr(500)}
-	TypeNotImplemented = ErrorType{"NOT_IMPLEMENTED", langext.Ptr(500)}
-
-	TypeWrap = ErrorType{"Wrap", nil}
-
-	TypeBindFailURI      = ErrorType{"BINDFAIL_URI", langext.Ptr(400)}
-	TypeBindFailQuery    = ErrorType{"BINDFAIL_QUERY", langext.Ptr(400)}
-	TypeBindFailJSON     = ErrorType{"BINDFAIL_JSON", langext.Ptr(400)}
-	TypeBindFailFormData = ErrorType{"BINDFAIL_FORMDATA", langext.Ptr(400)}
-	TypeBindFailHeader   = ErrorType{"BINDFAIL_HEADER", langext.Ptr(400)}
-
-	TypeUnauthorized = ErrorType{"UNAUTHORIZED", langext.Ptr(401)}
-	TypeAuthFailed   = ErrorType{"AUTH_FAILED", langext.Ptr(401)}
-
-	// other values come from pkgconfig
+	TypeInternal       = NewType("INTERNAL_ERROR", langext.Ptr(500))
+	TypePanic          = NewType("PANIC", langext.Ptr(500))
+	TypeNotImplemented = NewType("NOT_IMPLEMENTED", langext.Ptr(500))
+
+	TypeMongoQuery        = NewType("MONGO_QUERY", langext.Ptr(500))
+	TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500))
+	TypeMongoFilter       = NewType("MONGO_FILTER", langext.Ptr(500))
+	TypeMongoReflection   = NewType("MONGO_REFLECTION", langext.Ptr(500))
+
+	TypeWrap = NewType("Wrap", nil)
+
+	TypeBindFailURI      = NewType("BINDFAIL_URI", langext.Ptr(400))
+	TypeBindFailQuery    = NewType("BINDFAIL_QUERY", langext.Ptr(400))
+	TypeBindFailJSON     = NewType("BINDFAIL_JSON", langext.Ptr(400))
+	TypeBindFailFormData = NewType("BINDFAIL_FORMDATA", langext.Ptr(400))
+	TypeBindFailHeader   = NewType("BINDFAIL_HEADER", langext.Ptr(400))
+
+	TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400))
+
+	TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401))
+	TypeAuthFailed   = NewType("AUTH_FAILED", langext.Ptr(401))
+
+	// other values come the used package
 )
 
+var registeredTypes = dataext.SyncSet[string]{}
+
 func NewType(key string, defStatusCode *int) ErrorType {
+	insertOkay := registeredTypes.Add(key)
+	if !insertOkay {
+		panic("Cannot register same ErrType ('" + key + "') more than once")
+	}
+
 	return ErrorType{key, defStatusCode}
 }
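
In practice error types are now meant to be registered exactly once, typically as package-level variables; a hedged sketch (the type name and status code are invented):

	var TypeQuotaExceeded = exerr.NewType("QUOTA_EXCEEDED", langext.Ptr(429))

	// registering the same key again, e.g. exerr.NewType("QUOTA_EXCEEDED", nil),
	// would panic with "Cannot register same ErrType ('QUOTA_EXCEEDED') more than once"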


@@ -6,32 +6,35 @@
 )
 
 type ErrorPackageConfig struct {
 	ZeroLogErrTraces       bool // autom print zerolog logs on .Build() (for SevErr and SevFatal)
 	ZeroLogAllTraces       bool // autom print zerolog logs on .Build() (for all Severities)
 	RecursiveErrors        bool // errors contains their Origin-Error
 	ExtendedGinOutput      bool // Log extended data (trace, meta, ...) to gin in err.Output()
+	IncludeMetaInGinOutput bool // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output()
 	ExtendGinOutput        func(err *ExErr, json map[string]any)            // (Optionally) extend the gin output with more fields
 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) // (Optionally) extend the gin `__data` output with more fields
 }
 
 type ErrorPackageConfigInit struct {
-	ZeroLogErrTraces    bool
-	ZeroLogAllTraces    bool
-	RecursiveErrors     bool
-	ExtendedGinOutput   bool
-	ExtendGinOutput     func(err *ExErr, json map[string]any)
-	ExtendGinDataOutput func(err *ExErr, depth int, json map[string]any)
+	ZeroLogErrTraces       *bool
+	ZeroLogAllTraces       *bool
+	RecursiveErrors        *bool
+	ExtendedGinOutput      *bool
+	IncludeMetaInGinOutput *bool
+	ExtendGinOutput        func(err *ExErr, json map[string]any)
+	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any)
 }
 
 var initialized = false
 
 var pkgconfig = ErrorPackageConfig{
 	ZeroLogErrTraces:       true,
 	ZeroLogAllTraces:       false,
 	RecursiveErrors:        true,
 	ExtendedGinOutput:      false,
+	IncludeMetaInGinOutput: true,
 	ExtendGinOutput:        func(err *ExErr, json map[string]any) {},
 	ExtendGinDataOutput:    func(err *ExErr, depth int, json map[string]any) {},
 }
 
 // Init initializes the exerr packages
@@ -53,12 +56,13 @@ func Init(cfg ErrorPackageConfigInit) {
 	}
 
 	pkgconfig = ErrorPackageConfig{
-		ZeroLogErrTraces:    cfg.ZeroLogErrTraces,
-		ZeroLogAllTraces:    cfg.ZeroLogAllTraces,
-		RecursiveErrors:     cfg.RecursiveErrors,
-		ExtendedGinOutput:   cfg.ExtendedGinOutput,
-		ExtendGinOutput:     ego,
-		ExtendGinDataOutput: egdo,
+		ZeroLogErrTraces:       langext.Coalesce(cfg.ZeroLogErrTraces, pkgconfig.ZeroLogErrTraces),
+		ZeroLogAllTraces:       langext.Coalesce(cfg.ZeroLogAllTraces, pkgconfig.ZeroLogAllTraces),
+		RecursiveErrors:        langext.Coalesce(cfg.RecursiveErrors, pkgconfig.RecursiveErrors),
+		ExtendedGinOutput:      langext.Coalesce(cfg.ExtendedGinOutput, pkgconfig.ExtendedGinOutput),
+		IncludeMetaInGinOutput: langext.Coalesce(cfg.IncludeMetaInGinOutput, pkgconfig.IncludeMetaInGinOutput),
+		ExtendGinOutput:        ego,
+		ExtendGinDataOutput:    egdo,
 	}
 
 	initialized = true
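
Because the init struct now takes pointers, callers only set the options they care about and everything else keeps its package default via langext.Coalesce; a hedged sketch:

	exerr.Init(exerr.ErrorPackageConfigInit{
		ZeroLogErrTraces:       langext.Ptr(true),
		IncludeMetaInGinOutput: langext.Ptr(false),
		// unset fields fall back to the defaults above
	})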


@@ -4,6 +4,7 @@ import (
"github.com/rs/xid" "github.com/rs/xid"
"github.com/rs/zerolog" "github.com/rs/zerolog"
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"strings" "strings"
"time" "time"
) )
@@ -20,9 +21,10 @@ type ExErr struct {
Message string `json:"message"` Message string `json:"message"`
WrappedErrType string `json:"wrappedErrType"` WrappedErrType string `json:"wrappedErrType"`
WrappedErr any `json:"-"`
Caller string `json:"caller"` Caller string `json:"caller"`
OriginalError *ExErr OriginalError *ExErr `json:"originalError"`
Meta MetaMap `json:"meta"` Meta MetaMap `json:"meta"`
} }
@@ -31,10 +33,49 @@ func (ee *ExErr) Error() string {
return ee.Message return ee.Message
} }
// Unwrap must be implemented so that some error.XXX methods work
func (ee *ExErr) Unwrap() error { func (ee *ExErr) Unwrap() error {
if ee.OriginalError == nil {
return nil // this is neccessary - otherwise we return a wrapped nil and the `x == nil` comparison fails (= panic in errors.Is and other failures)
}
return ee.OriginalError return ee.OriginalError
} }
// Is must be implemented so that error.Is(x) works
func (ee *ExErr) Is(e error) bool {
return IsFrom(ee, e)
}
// As must be implemented so that error.As(x) works
//
//goland:noinspection GoTypeAssertionOnErrors
func (ee *ExErr) As(target any) bool {
if dstErr, ok := target.(*ExErr); ok {
if dst0, ok := ee.contains(dstErr); ok {
dstErr = dst0
return true
} else {
return false
}
} else {
val := reflect.ValueOf(target)
typStr := val.Type().Elem().String()
for curr := ee; curr != nil; curr = curr.OriginalError {
if curr.Category == CatForeign && curr.WrappedErrType == typStr && curr.WrappedErr != nil {
val.Elem().Set(reflect.ValueOf(curr.WrappedErr))
return true
}
}
return false
}
}
func (ee *ExErr) Log(evt *zerolog.Event) { func (ee *ExErr) Log(evt *zerolog.Event) {
evt.Msg(ee.FormatLog(LogPrintFull)) evt.Msg(ee.FormatLog(LogPrintFull))
} }
@@ -190,6 +231,7 @@ func (ee *ExErr) RecursiveMeta(key string) *MetaValue {
return nil return nil
} }
// Depth returns the depth of recursively contained errors
func (ee *ExErr) Depth() int { func (ee *ExErr) Depth() int {
if ee.OriginalError == nil { if ee.OriginalError == nil {
return 1 return 1
@@ -198,6 +240,59 @@ func (ee *ExErr) Depth() int {
} }
} }
// contains test if the supplied error is contained in this error (anywhere in the chain)
func (ee *ExErr) contains(original *ExErr) (*ExErr, bool) {
if original == nil {
return nil, false
}
if ee == original {
return ee, true
}
for curr := ee; curr != nil; curr = curr.OriginalError {
if curr.equalsDirectProperties(curr) {
return curr, true
}
}
return nil, false
}
// equalsDirectProperties tests if ee and other are equals, but only looks at primary properties (not `OriginalError` or `Meta`)
func (ee *ExErr) equalsDirectProperties(other *ExErr) bool {
if ee.UniqueID != other.UniqueID {
return false
}
if ee.Timestamp != other.Timestamp {
return false
}
if ee.Category != other.Category {
return false
}
if ee.Severity != other.Severity {
return false
}
if ee.Type != other.Type {
return false
}
if ee.StatusCode != other.StatusCode {
return false
}
if ee.Message != other.Message {
return false
}
if ee.WrappedErrType != other.WrappedErrType {
return false
}
if ee.Caller != other.Caller {
return false
}
return true
}
func newID() string { func newID() string {
return xid.New().String() return xid.New().String()
} }

exerr/exerr_test.go (new file, 93 lines)

@@ -0,0 +1,93 @@
package exerr
import (
"errors"
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"testing"
)
type golangErr struct {
Message string
}
func (g golangErr) Error() string {
return g.Message
}
type golangErr2 struct {
Message string
}
func (g golangErr2) Error() string {
return g.Message
}
type simpleError struct {
}
func (g simpleError) Error() string {
return "Something simple went wroong"
}
type simpleError2 struct {
}
func (g simpleError2) Error() string {
return "Something simple went wroong"
}
func TestExErrIs1(t *testing.T) {
e0 := simpleError{}
wrap := Wrap(e0, "something went wrong").Str("test", "123").Build()
tst.AssertTrue(t, errors.Is(wrap, simpleError{}))
tst.AssertFalse(t, errors.Is(wrap, golangErr{}))
tst.AssertFalse(t, errors.Is(wrap, golangErr{"error1"}))
}
func TestExErrIs2(t *testing.T) {
e0 := golangErr{"error1"}
wrap := Wrap(e0, "something went wrong").Str("test", "123").Build()
tst.AssertTrue(t, errors.Is(wrap, e0))
tst.AssertTrue(t, errors.Is(wrap, golangErr{"error1"}))
tst.AssertFalse(t, errors.Is(wrap, golangErr{"error2"}))
tst.AssertFalse(t, errors.Is(wrap, simpleError{}))
}
func TestExErrAs(t *testing.T) {
e0 := golangErr{"error1"}
w0 := Wrap(e0, "something went wrong").Str("test", "123").Build()
{
out := golangErr{}
ok := errors.As(w0, &out)
tst.AssertTrue(t, ok)
tst.AssertEqual(t, out.Message, "error1")
}
w1 := Wrap(w0, "outher error").Build()
{
out := golangErr{}
ok := errors.As(w1, &out)
tst.AssertTrue(t, ok)
tst.AssertEqual(t, out.Message, "error1")
}
{
out := golangErr2{}
ok := errors.As(w1, &out)
tst.AssertFalse(t, ok)
}
{
out := simpleError2{}
ok := errors.As(w1, &out)
tst.AssertFalse(t, ok)
}
}


@@ -3,12 +3,13 @@ package exerr
import ( import (
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
json "gogs.mikescher.com/BlackForestBytes/goext/gojson" json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"net/http" "net/http"
"time" "time"
) )
func (ee *ExErr) toJson(depth int) gin.H { func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) langext.H {
ginJson := gin.H{} ginJson := langext.H{}
if ee.UniqueID != "" { if ee.UniqueID != "" {
ginJson["id"] = ee.UniqueID ginJson["id"] = ee.UniqueID
@@ -38,21 +39,60 @@ func (ee *ExErr) toJson(depth int) gin.H {
ginJson["wrappedErrType"] = ee.WrappedErrType ginJson["wrappedErrType"] = ee.WrappedErrType
} }
if ee.OriginalError != nil { if ee.OriginalError != nil {
ginJson["original"] = ee.OriginalError.toJson(depth + 1) ginJson["original"] = ee.OriginalError.toJson(depth+1, applyExtendListener, outputMeta)
} }
pkgconfig.ExtendGinDataOutput(ee, depth, ginJson) if outputMeta {
metaJson := langext.H{}
for metaKey, metaVal := range ee.Meta {
metaJson[metaKey] = metaVal.rawValueForJson()
}
ginJson["meta"] = metaJson
}
if applyExtendListener {
pkgconfig.ExtendGinDataOutput(ee, depth, ginJson)
}
return ginJson return ginJson
} }
// ToAPIJson converts the ExErr to a JSON object
// (the same object as used in the Output(gin) method)
//
// Parameters:
// - [applyExtendListener]: if false, the pkgconfig.ExtendGinOutput / pkgconfig.ExtendGinDataOutput listeners are not applied
// - [includeWrappedErrors]: if false, the recursive/wrapped errors are not included in `__data`
// - [includeMetaFields]: if true, the meta-values (those added via `.Str(key, value).Build()`) are included as well; requires includeWrappedErrors=true
func (ee *ExErr) ToAPIJson(applyExtendListener bool, includeWrappedErrors bool, includeMetaFields bool) langext.H {
apiOutput := langext.H{
"errorid": ee.UniqueID,
"message": ee.RecursiveMessage(),
"errorcode": ee.RecursiveType().Key,
"category": ee.RecursiveCategory().Category,
}
if includeWrappedErrors {
apiOutput["__data"] = ee.toJson(0, applyExtendListener, includeMetaFields)
}
if applyExtendListener {
pkgconfig.ExtendGinOutput(ee, apiOutput)
}
return apiOutput
}
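A rough usage sketch (not part of the diff): assuming you already hold a *ExErr and that langext.H marshals like an ordinary string-keyed map, the three flags map directly onto the parameters documented above and the result can be fed straight into json.Marshal.

package main

import (
	"encoding/json"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

// errorPayload is a sketch, not goext API: it renders an already-built *exerr.ExErr
// into the same API-style JSON object that Output(gin) would send, without meta fields.
func errorPayload(ee *exerr.ExErr) ([]byte, error) {
	// applyExtendListener=true, includeWrappedErrors=true, includeMetaFields=false
	return json.Marshal(ee.ToAPIJson(true, true, false))
}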
func (ee *ExErr) Output(g *gin.Context) { func (ee *ExErr) Output(g *gin.Context) {
warnOnPkgConfigNotInitialized()
var statuscode = http.StatusInternalServerError var statuscode = http.StatusInternalServerError
var baseCat = ee.RecursiveCategory() var baseCat = ee.RecursiveCategory()
var baseType = ee.RecursiveType() var baseType = ee.RecursiveType()
var baseStatuscode = ee.RecursiveStatuscode() var baseStatuscode = ee.RecursiveStatuscode()
var baseMessage = ee.RecursiveMessage()
if baseCat == CatUser { if baseCat == CatUser {
statuscode = http.StatusBadRequest statuscode = http.StatusBadRequest
@@ -66,20 +106,7 @@ func (ee *ExErr) Output(g *gin.Context) {
statuscode = *baseType.DefaultStatusCode statuscode = *baseType.DefaultStatusCode
} }
warnOnPkgConfigNotInitialized() ginOutput := ee.ToAPIJson(true, pkgconfig.ExtendedGinOutput, pkgconfig.IncludeMetaInGinOutput)
ginOutput := gin.H{
"errorid": ee.UniqueID,
"message": baseMessage,
"errorcode": baseType.Key,
"category": baseCat.Category,
}
if pkgconfig.ExtendedGinOutput {
ginOutput["__data"] = ee.toJson(0)
}
pkgconfig.ExtendGinOutput(ee, ginOutput)
g.Render(statuscode, json.GoJsonRender{Data: ginOutput, NilSafeSlices: true, NilSafeMaps: true}) g.Render(statuscode, json.GoJsonRender{Data: ginOutput, NilSafeSlices: true, NilSafeMaps: true})
} }

View File

@@ -24,6 +24,8 @@ func IsFrom(e error, original error) bool {
if e == nil { if e == nil {
return false return false
} }
//goland:noinspection GoDirectComparisonOfErrors
if e == original { if e == original {
return true return true
} }

View File

@@ -43,6 +43,7 @@ const (
MDTID metaDataType = "ID" MDTID metaDataType = "ID"
MDTAny metaDataType = "Interface" MDTAny metaDataType = "Interface"
MDTNil metaDataType = "Nil" MDTNil metaDataType = "Nil"
MDTEnum metaDataType = "Enum"
) )
type MetaValue struct { type MetaValue struct {
@@ -131,6 +132,8 @@ func (v MetaValue) SerializeValue() (string, error) {
return string(r), nil return string(r), nil
case MDTNil: case MDTNil:
return "", nil return "", nil
case MDTEnum:
return v.Value.(EnumWrap).Serialize(), nil
} }
return "", errors.New("Unknown type: " + string(v.DataType)) return "", errors.New("Unknown type: " + string(v.DataType))
} }
@@ -208,6 +211,8 @@ func (v MetaValue) ShortString(lim int) string {
return langext.StrLimit(string(r), lim, "...") return langext.StrLimit(string(r), lim, "...")
case MDTNil: case MDTNil:
return "<<null>>" return "<<null>>"
case MDTEnum:
return v.Value.(EnumWrap).String()
} }
return "(err)" return "(err)"
} }
@@ -270,6 +275,14 @@ func (v MetaValue) Apply(key string, evt *zerolog.Event) *zerolog.Event {
return evt.Ints32(key, v.Value.([]int32)) return evt.Ints32(key, v.Value.([]int32))
case MDTNil: case MDTNil:
return evt.Str(key, "<<null>>") return evt.Str(key, "<<null>>")
case MDTEnum:
if v.Value.(EnumWrap).IsNil {
return evt.Any(key, nil)
} else if v.Value.(EnumWrap).ValueRaw != nil {
return evt.Any(key, v.Value.(EnumWrap).ValueRaw)
} else {
return evt.Str(key, v.Value.(EnumWrap).ValueString)
}
} }
return evt.Str(key, "(err)") return evt.Str(key, "(err)")
} }
@@ -511,6 +524,10 @@ func (v *MetaValue) Deserialize(value string, datatype metaDataType) error {
v.Value = nil v.Value = nil
v.DataType = datatype v.DataType = datatype
return nil return nil
case MDTEnum:
v.Value = deserializeEnumWrap(value)
v.DataType = datatype
return nil
} }
return errors.New("Unknown type: " + string(datatype)) return errors.New("Unknown type: " + string(datatype))
} }
@@ -581,10 +598,66 @@ func (v MetaValue) ValueString() string {
return string(r) return string(r)
case MDTNil: case MDTNil:
return "<<null>>" return "<<null>>"
case MDTEnum:
return v.Value.(EnumWrap).String()
} }
return "(err)" return "(err)"
} }
// rawValueForJson returns the `Value` field most of the time,
// but for some datatypes we do special processing,
// so that we can plug the output value into json.Marshal without any surprises
func (v MetaValue) rawValueForJson() any {
if v.DataType == MDTAny {
if v.Value.(AnyWrap).IsNil {
return nil
}
if v.Value.(AnyWrap).IsError {
return bson.M{"@error": true}
}
jsonobj := primitive.M{}
jsonarr := primitive.A{}
if err := json.Unmarshal([]byte(v.Value.(AnyWrap).Json), &jsonobj); err == nil {
return jsonobj
} else if err := json.Unmarshal([]byte(v.Value.(AnyWrap).Json), &jsonarr); err == nil {
return jsonarr
} else {
return bson.M{"type": v.Value.(AnyWrap).Type, "data": v.Value.(AnyWrap).Json}
}
}
if v.DataType == MDTID {
if v.Value.(IDWrap).IsNil {
return nil
}
return v.Value.(IDWrap).Value
}
if v.DataType == MDTBytes {
return hex.EncodeToString(v.Value.([]byte))
}
if v.DataType == MDTDuration {
return v.Value.(time.Duration).String()
}
if v.DataType == MDTTime {
return v.Value.(time.Time).Format(time.RFC3339Nano)
}
if v.DataType == MDTObjectID {
return v.Value.(primitive.ObjectID).Hex()
}
if v.DataType == MDTNil {
return nil
}
if v.DataType == MDTEnum {
if v.Value.(EnumWrap).IsNil {
return nil
}
if v.Value.(EnumWrap).ValueRaw != nil {
return v.Value.(EnumWrap).ValueRaw
}
return v.Value.(EnumWrap).ValueString
}
return v.Value
}
func (mm MetaMap) FormatOneLine(singleMaxLen int) string { func (mm MetaMap) FormatOneLine(singleMaxLen int) string {
r := "" r := ""

View File

@@ -4,6 +4,7 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"gogs.mikescher.com/BlackForestBytes/goext/enums"
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
"strings" "strings"
) )
@@ -131,3 +132,58 @@ func deserializeAnyWrap(v string) AnyWrap {
} }
} }
} }
type EnumWrap struct {
Type string
ValueString string
ValueRaw enums.Enum // `ValueRaw` is lost during serialization roundtrip
IsNil bool
}
func newEnumWrap(val enums.Enum) EnumWrap {
t := fmt.Sprintf("%T", val)
arr := strings.Split(t, ".")
if len(arr) > 0 {
t = arr[len(arr)-1]
}
if langext.IsNil(val) {
return EnumWrap{Type: t, ValueString: "", ValueRaw: val, IsNil: true}
}
if enumstr, ok := val.(enums.StringEnum); ok {
return EnumWrap{Type: t, ValueString: enumstr.String(), ValueRaw: val, IsNil: false}
}
return EnumWrap{Type: t, ValueString: fmt.Sprintf("%v", val), ValueRaw: val, IsNil: false}
}
func (w EnumWrap) Serialize() string {
if w.IsNil {
return "!nil" + ":" + w.Type
}
return w.Type + ":" + w.ValueString
}
func (w EnumWrap) String() string {
if w.IsNil {
return w.Type + "<<nil>>"
}
return "[" + w.Type + "] " + w.ValueString
}
func deserializeEnumWrap(v string) EnumWrap {
r := strings.SplitN(v, ":", 2)
if len(r) == 2 && r[0] == "!nil" {
return EnumWrap{Type: r[1], ValueString: v, ValueRaw: nil, IsNil: true}
}
if len(r) == 0 {
return EnumWrap{}
} else if len(r) == 1 {
return EnumWrap{Type: "", ValueString: v, ValueRaw: nil, IsNil: false}
} else {
return EnumWrap{Type: r[0], ValueString: r[1], ValueRaw: nil, IsNil: false}
}
}
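A small roundtrip sketch for the new enum meta value. These helpers are unexported, so it would live inside package exerr; `myEnumValue` is a hypothetical enums.Enum value, not part of this diff.

// roundtrip sketch: only Type and ValueString survive serialization,
// ValueRaw is intentionally dropped (see the struct comment above)
w := newEnumWrap(myEnumValue)  // myEnumValue: hypothetical enums.Enum value
s := w.Serialize()             // e.g. "MyEnumType:some-value", or "!nil:MyEnumType" for nil enums
back := deserializeEnumWrap(s) // back.Type == w.Type, back.ValueString == w.ValueString
_ = back.ValueRaw              // always nil after deserialization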

View File

@@ -14,6 +14,9 @@ type AppContext struct {
} }
func CreateAppContext(g *gin.Context, innerCtx context.Context, cancelFn context.CancelFunc) *AppContext { func CreateAppContext(g *gin.Context, innerCtx context.Context, cancelFn context.CancelFunc) *AppContext {
for key, value := range g.Keys {
innerCtx = context.WithValue(innerCtx, key, value)
}
return &AppContext{ return &AppContext{
inner: innerCtx, inner: innerCtx,
cancelFunc: cancelFn, cancelFunc: cancelFn,
@@ -38,6 +41,10 @@ func (ac *AppContext) Value(key any) any {
return ac.inner.Value(key) return ac.inner.Value(key)
} }
func (ac *AppContext) Set(key, value any) {
ac.inner = context.WithValue(ac.inner, key, value)
}
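A short sketch of the two additions above: values copied from gin's g.Keys are now reachable via Value (gin keys are plain strings), and Set stores extra request-scoped values. `userIDKey` is a made-up key for this example.

// inside a handler that received the *ginext.AppContext as ctx:
const userIDKey = "userID" // example key, not part of goext

ctx.Set(userIDKey, "usr_123")            // wraps context.WithValue on the inner context
uid, ok := ctx.Value(userIDKey).(string) // standard context.Context lookup
if ok {
	// values set by gin middleware via g.Set("...") are visible here too,
	// because CreateAppContext now copies g.Keys into the inner context
	_ = uid
}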
func (ac *AppContext) Cancel() { func (ac *AppContext) Cancel() {
ac.cancelled = true ac.cancelled = true
ac.cancelFunc() ac.cancelFunc()

View File

@@ -0,0 +1,12 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
)
func BodyBuffer(g *gin.Context) {
if g.Request.Body != nil {
g.Request.Body = dataext.NewBufferedReadCloser(g.Request.Body)
}
}
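The middleware simply swaps the request body for a re-readable buffered reader. It is wired in automatically when the engine is built with bufferBody=true (see the GinRouteBuilder change further down); a manual registration on a bare gin engine would be a sketch like this:

engine := gin.New()
engine.Use(ginext.BodyBuffer) // every handler behind this can re-read g.Request.Body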

View File

@@ -18,6 +18,7 @@ type GinWrapper struct {
allowCors bool allowCors bool
ginDebug bool ginDebug bool
bufferBody bool
requestTimeout time.Duration requestTimeout time.Duration
routeSpecs []ginRouteSpec routeSpecs []ginRouteSpec
@@ -30,7 +31,13 @@ type ginRouteSpec struct {
Handler string Handler string
} }
func NewEngine(allowCors bool, ginDebug bool, timeout time.Duration) *GinWrapper { // NewEngine creates a new (wrapped) ginEngine
// Parameters are:
// - [allowCors] Adds a CORS handler that allows all CORS requests on the default HTTP methods
// - [ginDebug] Sets gin.debug to true (adds more logs)
// - [bufferBody] Buffers the request body stream so that the ginext error handler can later include the whole request body
// - [timeout] The default handler timeout
func NewEngine(allowCors bool, ginDebug bool, bufferBody bool, timeout time.Duration) *GinWrapper {
engine := gin.New() engine := gin.New()
wrapper := &GinWrapper{ wrapper := &GinWrapper{
@@ -38,6 +45,7 @@ func NewEngine(allowCors bool, ginDebug bool, timeout time.Duration) *GinWrapper
SuppressGinLogs: false, SuppressGinLogs: false,
allowCors: allowCors, allowCors: allowCors,
ginDebug: ginDebug, ginDebug: ginDebug,
bufferBody: bufferBody,
requestTimeout: timeout, requestTimeout: timeout,
} }
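Callers now have to pass the new third argument; a minimal sketch of the updated constructor call:

wrapper := ginext.NewEngine(
	true,           // allowCors
	false,          // ginDebug
	true,           // bufferBody: prepends the BodyBuffer middleware to every route
	30*time.Second, // default per-request timeout (overridable per request, see WithTimeout below)
)
_ = wrapper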

View File

@@ -8,6 +8,7 @@ import (
"gogs.mikescher.com/BlackForestBytes/goext/exerr" "gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
"runtime/debug" "runtime/debug"
"time"
) )
type PreContext struct { type PreContext struct {
@@ -18,6 +19,7 @@ type PreContext struct {
body any body any
form any form any
header any header any
timeout *time.Duration
} }
func (pctx *PreContext) URI(uri any) *PreContext { func (pctx *PreContext) URI(uri any) *PreContext {
@@ -45,6 +47,11 @@ func (pctx *PreContext) Header(header any) *PreContext {
return pctx return pctx
} }
func (pctx *PreContext) WithTimeout(to time.Duration) *PreContext {
pctx.timeout = &to
return pctx
}
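A sketch of a per-route override (the exact WHandlerFunc signature is not shown in this excerpt; the point is only the WithTimeout call before Start):

// inside a slow endpoint's handler:
ctx, g, errResp := pctx.
	WithTimeout(5 * time.Minute). // overrides wrapper.requestTimeout (see the langext.Coalesce call below)
	Start()
if errResp != nil {
	return *errResp
}
defer ctx.Cancel()
_ = g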
func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
if pctx.uri != nil { if pctx.uri != nil {
if err := pctx.ginCtx.ShouldBindUri(pctx.uri); err != nil { if err := pctx.ginCtx.ShouldBindUri(pctx.uri); err != nil {
@@ -92,6 +99,14 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
Build() Build()
return nil, nil, langext.Ptr(Error(err)) return nil, nil, langext.Ptr(Error(err))
} }
} else if pctx.ginCtx.ContentType() == "application/x-www-form-urlencoded" {
if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil {
err = exerr.Wrap(err, "Failed to read urlencoded-form").
WithType(exerr.TypeBindFailFormData).
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(Error(err))
}
} else { } else {
err := exerr.New(exerr.TypeBindFailFormData, "missing form body"). err := exerr.New(exerr.TypeBindFailFormData, "missing form body").
Str("struct_type", fmt.Sprintf("%T", pctx.form)). Str("struct_type", fmt.Sprintf("%T", pctx.form)).
@@ -101,7 +116,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
} }
if pctx.header != nil { if pctx.header != nil {
if err := pctx.ginCtx.ShouldBindHeader(pctx.query); err != nil { if err := pctx.ginCtx.ShouldBindHeader(pctx.header); err != nil {
err = exerr.Wrap(err, "Failed to read header"). err = exerr.Wrap(err, "Failed to read header").
WithType(exerr.TypeBindFailHeader). WithType(exerr.TypeBindFailHeader).
Str("struct_type", fmt.Sprintf("%T", pctx.query)). Str("struct_type", fmt.Sprintf("%T", pctx.query)).
@@ -110,7 +125,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
} }
} }
ictx, cancel := context.WithTimeout(context.Background(), pctx.wrapper.requestTimeout) ictx, cancel := context.WithTimeout(context.Background(), langext.Coalesce(pctx.timeout, pctx.wrapper.requestTimeout))
actx := CreateAppContext(pctx.ginCtx, ictx, cancel) actx := CreateAppContext(pctx.ginCtx, ictx, cancel)
return actx, pctx.ginCtx, nil return actx, pctx.ginCtx, nil

View File

@@ -113,6 +113,31 @@ func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse {
return j return j
} }
type downloadDataHTTPResponse struct {
statusCode int
mimetype string
data []byte
filename *string
headers []headerval
}
func (j downloadDataHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
if j.filename != nil {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
g.Data(j.statusCode, j.mimetype, j.data)
}
func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
type redirectHTTPResponse struct { type redirectHTTPResponse struct {
statusCode int statusCode int
url string url string
@@ -166,6 +191,10 @@ func Download(mimetype string, filepath string, filename string) HTTPResponse {
return &fileHTTPResponse{mimetype: mimetype, filepath: filepath, filename: &filename} return &fileHTTPResponse{mimetype: mimetype, filepath: filepath, filename: &filename}
} }
func DownloadData(status int, mimetype string, filename string, data []byte) HTTPResponse {
return &downloadDataHTTPResponse{statusCode: status, mimetype: mimetype, data: data, filename: &filename}
}
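Usage sketch: unlike Download (which streams a file from disk by path), DownloadData sends an in-memory byte slice as an attachment:

// inside a handler, returning an HTTPResponse:
csvBytes := []byte("id;name\n1;alice\n")
return ginext.DownloadData(http.StatusOK, "text/csv", "export.csv", csvBytes)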
func Redirect(sc int, newURL string) HTTPResponse { func Redirect(sc int, newURL string) HTTPResponse {
return &redirectHTTPResponse{statusCode: sc, url: newURL} return &redirectHTTPResponse{statusCode: sc, url: newURL}
} }

View File

@@ -111,6 +111,13 @@ func (w *GinRouteBuilder) Use(middleware ...gin.HandlerFunc) *GinRouteBuilder {
func (w *GinRouteBuilder) Handle(handler WHandlerFunc) { func (w *GinRouteBuilder) Handle(handler WHandlerFunc) {
if w.routes.wrapper.bufferBody {
arr := make([]gin.HandlerFunc, 0, len(w.handlers)+1)
arr = append(arr, BodyBuffer)
arr = append(arr, w.handlers...)
w.handlers = arr
}
middlewareNames := langext.ArrMap(w.handlers, func(v gin.HandlerFunc) string { return nameOfFunction(v) }) middlewareNames := langext.ArrMap(w.handlers, func(v gin.HandlerFunc) string { return nameOfFunction(v) })
handlerName := nameOfFunction(handler) handlerName := nameOfFunction(handler)
@@ -129,20 +136,32 @@ func (w *GinRouteBuilder) Handle(handler WHandlerFunc) {
w.routes.wrapper.routeSpecs = append(w.routes.wrapper.routeSpecs, ginRouteSpec{ w.routes.wrapper.routeSpecs = append(w.routes.wrapper.routeSpecs, ginRouteSpec{
Method: methodName, Method: methodName,
URL: w.relPath, URL: w.absPath,
Middlewares: middlewareNames, Middlewares: middlewareNames,
Handler: handlerName, Handler: handlerName,
}) })
} }
func (w *GinWrapper) NoRoute(handler WHandlerFunc) { func (w *GinWrapper) NoRoute(handler WHandlerFunc) {
w.engine.NoRoute(Wrap(w, handler))
handlers := make([]gin.HandlerFunc, 0)
if w.bufferBody {
handlers = append(handlers, BodyBuffer)
}
middlewareNames := langext.ArrMap(handlers, func(v gin.HandlerFunc) string { return nameOfFunction(v) })
handlerName := nameOfFunction(handler)
handlers = append(handlers, Wrap(w, handler))
w.engine.NoRoute(handlers...)
w.routeSpecs = append(w.routeSpecs, ginRouteSpec{ w.routeSpecs = append(w.routeSpecs, ginRouteSpec{
Method: "ANY", Method: "ANY",
URL: "[NO_ROUTE]", URL: "[NO_ROUTE]",
Middlewares: nil, Middlewares: middlewareNames,
Handler: nameOfFunction(handler), Handler: handlerName,
}) })
} }
@@ -162,7 +181,7 @@ func nameOfFunction(f any) string {
fname = fname[:len(fname)-len("-fm")] fname = fname[:len(fname)-len("-fm")]
} }
suffix := rext.W(regexp.MustCompile("\\.func[0-9]+$")) suffix := rext.W(regexp.MustCompile(`\.func[0-9]+(?:\.[0-9]+)*$`))
if match, ok := suffix.MatchFirst(fname); ok { if match, ok := suffix.MatchFirst(fname); ok {
fname = fname[:len(fname)-match.FullMatch().Length()] fname = fname[:len(fname)-match.FullMatch().Length()]

go.mod

@@ -6,26 +6,26 @@ require (
github.com/gin-gonic/gin v1.9.1 github.com/gin-gonic/gin v1.9.1
github.com/jmoiron/sqlx v1.3.5 github.com/jmoiron/sqlx v1.3.5
github.com/rs/xid v1.5.0 github.com/rs/xid v1.5.0
github.com/rs/zerolog v1.29.1 github.com/rs/zerolog v1.30.0
go.mongodb.org/mongo-driver v1.12.0 go.mongodb.org/mongo-driver v1.12.1
golang.org/x/crypto v0.11.0 golang.org/x/crypto v0.13.0
golang.org/x/sys v0.10.0 golang.org/x/sys v0.12.0
golang.org/x/term v0.10.0 golang.org/x/term v0.12.0
) )
require ( require (
github.com/bytedance/sonic v1.10.0-rc2 // indirect github.com/bytedance/sonic v1.10.1 // indirect
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
github.com/chenzhuoyu/iasm v0.9.0 // indirect github.com/chenzhuoyu/iasm v0.9.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.2 // indirect github.com/gabriel-vasile/mimetype v1.4.2 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.14.1 // indirect github.com/go-playground/validator/v10 v10.15.4 // indirect
github.com/goccy/go-json v0.10.2 // indirect github.com/goccy/go-json v0.10.2 // indirect
github.com/golang/snappy v0.0.4 // indirect github.com/golang/snappy v0.0.4 // indirect
github.com/json-iterator/go v1.1.12 // indirect github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.16.7 // indirect github.com/klauspost/compress v1.17.0 // indirect
github.com/klauspost/cpuid/v2 v2.2.5 // indirect github.com/klauspost/cpuid/v2 v2.2.5 // indirect
github.com/leodido/go-urn v1.2.4 // indirect github.com/leodido/go-urn v1.2.4 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-colorable v0.1.13 // indirect
@@ -33,7 +33,7 @@ require (
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/montanaflynn/stats v0.7.1 // indirect github.com/montanaflynn/stats v0.7.1 // indirect
github.com/pelletier/go-toml/v2 v2.0.9 // indirect github.com/pelletier/go-toml/v2 v2.1.0 // indirect
github.com/pkg/errors v0.9.1 // indirect github.com/pkg/errors v0.9.1 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.11 // indirect github.com/ugorji/go/codec v1.2.11 // indirect
@@ -41,10 +41,10 @@ require (
github.com/xdg-go/scram v1.1.2 // indirect github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect
golang.org/x/arch v0.4.0 // indirect golang.org/x/arch v0.5.0 // indirect
golang.org/x/net v0.12.0 // indirect golang.org/x/net v0.15.0 // indirect
golang.org/x/sync v0.3.0 // indirect golang.org/x/sync v0.3.0 // indirect
golang.org/x/text v0.11.0 // indirect golang.org/x/text v0.13.0 // indirect
google.golang.org/protobuf v1.31.0 // indirect google.golang.org/protobuf v1.31.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect
) )

go.sum

@@ -2,6 +2,12 @@ github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1
github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
github.com/bytedance/sonic v1.10.0-rc2 h1:oDfRZ+4m6AYCOC0GFeOCeYqvBmucy1isvouS2K0cPzo= github.com/bytedance/sonic v1.10.0-rc2 h1:oDfRZ+4m6AYCOC0GFeOCeYqvBmucy1isvouS2K0cPzo=
github.com/bytedance/sonic v1.10.0-rc2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= github.com/bytedance/sonic v1.10.0-rc2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.10.0-rc3 h1:uNSnscRapXTwUgTyOF0GVljYD08p9X/Lbr9MweSV3V0=
github.com/bytedance/sonic v1.10.0-rc3/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.10.0 h1:qtNZduETEIWJVIyDl01BeNxur2rW9OwTQ/yBqFRkKEk=
github.com/bytedance/sonic v1.10.0/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.10.1 h1:7a1wuFXL1cMy7a3f7/VFcEtriuXQnUBhtoVfOZiaysc=
github.com/bytedance/sonic v1.10.1/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0=
@@ -24,6 +30,14 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.14.1 h1:9c50NUPC30zyuKprjL3vNZ0m5oG+jU0zvx4AqHGnv4k= github.com/go-playground/validator/v10 v10.14.1 h1:9c50NUPC30zyuKprjL3vNZ0m5oG+jU0zvx4AqHGnv4k=
github.com/go-playground/validator/v10 v10.14.1/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/go-playground/validator/v10 v10.14.1/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-playground/validator/v10 v10.15.0 h1:nDU5XeOKtB3GEa+uB7GNYwhVKsgjAR7VgKoNB6ryXfw=
github.com/go-playground/validator/v10 v10.15.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-playground/validator/v10 v10.15.1 h1:BSe8uhN+xQ4r5guV/ywQI4gO59C2raYcGffYWZEjZzM=
github.com/go-playground/validator/v10 v10.15.1/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-playground/validator/v10 v10.15.3 h1:S+sSpunYjNPDuXkWbK+x+bA7iXiW296KG4dL3X7xUZo=
github.com/go-playground/validator/v10 v10.15.3/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-playground/validator/v10 v10.15.4 h1:zMXza4EpOdooxPel5xDqXEdXG5r+WggpvnAKMsalBjs=
github.com/go-playground/validator/v10 v10.15.4/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
@@ -44,6 +58,8 @@ github.com/klauspost/compress v1.13.6 h1:P76CopJELS0TiO2mebmnzgWaajssP/EszplttgQ
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I= github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I=
github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM=
github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg=
github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
@@ -75,6 +91,8 @@ github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
github.com/pelletier/go-toml/v2 v2.0.9 h1:uH2qQXheeefCCkuBBSLi7jCiSmj3VRh2+Goq2N7Xxu0= github.com/pelletier/go-toml/v2 v2.0.9 h1:uH2qQXheeefCCkuBBSLi7jCiSmj3VRh2+Goq2N7Xxu0=
github.com/pelletier/go-toml/v2 v2.0.9/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= github.com/pelletier/go-toml/v2 v2.0.9/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
@@ -85,6 +103,8 @@ github.com/rs/zerolog v1.29.0 h1:Zes4hju04hjbvkVkOhdl2HpZa+0PmVwigmo8XoORE5w=
github.com/rs/zerolog v1.29.0/go.mod h1:NILgTygv/Uej1ra5XxGf82ZFSLk58MFGAUS2o6usyD0= github.com/rs/zerolog v1.29.0/go.mod h1:NILgTygv/Uej1ra5XxGf82ZFSLk58MFGAUS2o6usyD0=
github.com/rs/zerolog v1.29.1 h1:cO+d60CHkknCbvzEWxP0S9K6KqyTjrCNUy1LdQLCGPc= github.com/rs/zerolog v1.29.1 h1:cO+d60CHkknCbvzEWxP0S9K6KqyTjrCNUy1LdQLCGPc=
github.com/rs/zerolog v1.29.1/go.mod h1:Le6ESbR7hc+DP6Lt1THiV8CQSdkkNrd3R0XbEgp3ZBU= github.com/rs/zerolog v1.29.1/go.mod h1:Le6ESbR7hc+DP6Lt1THiV8CQSdkkNrd3R0XbEgp3ZBU=
github.com/rs/zerolog v1.30.0 h1:SymVODrcRsaRaSInD9yQtKbtWqwsfoPcRff/oRXLj4c=
github.com/rs/zerolog v1.30.0/go.mod h1:/tk+P47gFdPXq4QYjvCmT5/Gsug2nagsFWBWhAiSi1w=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
@@ -120,9 +140,13 @@ go.mongodb.org/mongo-driver v1.11.2 h1:+1v2rDQUWNcGW7/7E0Jvdz51V38XXxJfhzbV17aNH
go.mongodb.org/mongo-driver v1.11.2/go.mod h1:s7p5vEtfbeR1gYi6pnj3c3/urpbLv2T5Sfd6Rp2HBB8= go.mongodb.org/mongo-driver v1.11.2/go.mod h1:s7p5vEtfbeR1gYi6pnj3c3/urpbLv2T5Sfd6Rp2HBB8=
go.mongodb.org/mongo-driver v1.12.0 h1:aPx33jmn/rQuJXPQLZQ8NtfPQG8CaqgLThFtqRb0PiE= go.mongodb.org/mongo-driver v1.12.0 h1:aPx33jmn/rQuJXPQLZQ8NtfPQG8CaqgLThFtqRb0PiE=
go.mongodb.org/mongo-driver v1.12.0/go.mod h1:AZkxhPnFJUoH7kZlFkVKucV20K387miPfm7oimrSmK0= go.mongodb.org/mongo-driver v1.12.0/go.mod h1:AZkxhPnFJUoH7kZlFkVKucV20K387miPfm7oimrSmK0=
go.mongodb.org/mongo-driver v1.12.1 h1:nLkghSU8fQNaK7oUmDhQFsnrtcoNy7Z6LVFKsEecqgE=
go.mongodb.org/mongo-driver v1.12.1/go.mod h1:/rGBTebI3XYboVmgz+Wv3Bcbl3aD0QF9zl6kDDw18rQ=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.4.0 h1:A8WCeEWhLwPBKNbFi5Wv5UTCBx5zzubnXDlMOFAzFMc= golang.org/x/arch v0.4.0 h1:A8WCeEWhLwPBKNbFi5Wv5UTCBx5zzubnXDlMOFAzFMc=
golang.org/x/arch v0.4.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/arch v0.4.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y=
golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
@@ -131,6 +155,10 @@ golang.org/x/crypto v0.4.0 h1:UVQgzMY87xqpKNgb+kDsll2Igd33HszWHFLmpaRMq/8=
golang.org/x/crypto v0.4.0/go.mod h1:3quD/ATkf6oY+rnes5c3ExXTbLc8mueNue5/DoinL80= golang.org/x/crypto v0.4.0/go.mod h1:3quD/ATkf6oY+rnes5c3ExXTbLc8mueNue5/DoinL80=
golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA= golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA=
golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio=
golang.org/x/crypto v0.12.0 h1:tFM/ta59kqch6LlvYnPa0yx5a83cL2nHflFhYKvv9Yk=
golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw=
golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@@ -139,6 +167,12 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50= golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50=
golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA=
golang.org/x/net v0.13.0 h1:Nvo8UFsZ8X3BhAC9699Z1j7XQ3rsZnUUm7jfBEk1ueY=
golang.org/x/net v0.13.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA=
golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14=
golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI=
golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -164,6 +198,10 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA=
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM=
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.1.0 h1:g6Z6vPFA9dYBAF7DWcH6sCcOntplXsDKcliusYijMlw= golang.org/x/term v0.1.0 h1:g6Z6vPFA9dYBAF7DWcH6sCcOntplXsDKcliusYijMlw=
@@ -172,6 +210,10 @@ golang.org/x/term v0.3.0 h1:qoo4akIqOcDME5bhc/NgxUdovd6BSS2uMsVjB56q1xI=
golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c= golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c=
golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o=
golang.org/x/term v0.11.0 h1:F9tnn/DA/Im8nCwm+fX+1/eBwi4qFjRT++MhtVC4ZX0=
golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU=
golang.org/x/term v0.12.0 h1:/ZfYdc3zq+q02Rv9vGqTeSItdzZTSNDmfTi0mBAuidU=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
@@ -182,6 +224,10 @@ golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4=
golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.12.0 h1:k+n5B8goJNdU7hSvEtMUz3d1Q6D/XW4COJSJR6fN0mc=
golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=

View File

@@ -1,5 +1,5 @@
package goext package goext
const GoextVersion = "0.0.210" const GoextVersion = "0.0.269"
const GoextVersionTimestamp = "2023-07-25T11:16:11+0200" const GoextVersionTimestamp = "2023-09-25T09:18:22+0200"

View File

@@ -400,7 +400,7 @@ func ArrCastErr[T1 any, T2 any](arr []T1) ([]T2, error) {
if vcast, ok := any(v).(T2); ok { if vcast, ok := any(v).(T2); ok {
r[i] = vcast r[i] = vcast
} else { } else {
return nil, errors.New(fmt.Sprintf("Cannot cast element %d of type %T to type %s", i, v, *new(T2))) return nil, errors.New(fmt.Sprintf("Cannot cast element %d of type %T to type %v", i, v, *new(T2)))
} }
} }
return r, nil return r, nil
@@ -412,7 +412,7 @@ func ArrCastPanic[T1 any, T2 any](arr []T1) []T2 {
if vcast, ok := any(v).(T2); ok { if vcast, ok := any(v).(T2); ok {
r[i] = vcast r[i] = vcast
} else { } else {
panic(fmt.Sprintf("Cannot cast element %d of type %T to type %s", i, v, *new(T2))) panic(fmt.Sprintf("Cannot cast element %d of type %T to type %v", i, v, *new(T2)))
} }
} }
return r return r
@@ -467,3 +467,15 @@ func ArrayToInterface[T any](t []T) []interface{} {
} }
return res return res
} }
func JoinString(arr []string, delimiter string) string {
str := ""
for i, v := range arr {
str += v
if i < len(arr)-1 {
str += delimiter
}
}
return str
}
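Behaviourally this matches strings.Join from the standard library (an empty slice gives "", a single element is returned as-is); a quick sketch:

a := JoinString([]string{"1", "2", "3"}, ",") // "1,2,3"
b := strings.Join([]string{"1", "2", "3"}, ",")
_ = a == b // true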

langext/array_test.go (new file)

@@ -0,0 +1,12 @@
package langext
import (
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"testing"
)
func TestJoinString(t *testing.T) {
ids := []string{"1", "2", "3"}
res := JoinString(ids, ",")
tst.AssertEqual(t, res, "1,2,3")
}

View File

@@ -1,6 +1,7 @@
package langext package langext
import ( import (
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"testing" "testing"
) )
@@ -59,9 +60,3 @@ func TestBase58FlickrDecoding(t *testing.T) {
tst.AssertEqual(t, _decStr(t, Base58FlickrEncoding, "9aJCVZR"), "Hello") tst.AssertEqual(t, _decStr(t, Base58FlickrEncoding, "9aJCVZR"), "Hello")
tst.AssertEqual(t, _decStr(t, Base58FlickrEncoding, "48638rmBiUzG5NKQoX4KcuE5C8paCFACnE28F7qDx13PRtennAmYSSJQ5gJSRihf5ZDyEQS4UimtihR7uARt4wbty2fW9duTQTM9n1DwUBevreyzGwu6W4YSgrvQgCPDxsiE1mCdZsF8VEBpuHHEiJyw"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in.") tst.AssertEqual(t, _decStr(t, Base58FlickrEncoding, "48638rmBiUzG5NKQoX4KcuE5C8paCFACnE28F7qDx13PRtennAmYSSJQ5gJSRihf5ZDyEQS4UimtihR7uARt4wbty2fW9duTQTM9n1DwUBevreyzGwu6W4YSgrvQgCPDxsiE1mCdZsF8VEBpuHHEiJyw"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in.")
} }
func assertEqual(t *testing.T, actual string, expected string) {
if actual != expected {
t.Errorf("values differ: Actual: '%v', Expected: '%v'", actual, expected)
}
}

View File

@@ -13,6 +13,7 @@ type Regex interface {
ReplaceAllFunc(haystack string, repl func(string) string) string ReplaceAllFunc(haystack string, repl func(string) string) string
RemoveAll(haystack string) string RemoveAll(haystack string) string
GroupCount() int GroupCount() int
String() string
} }
type regexWrapper struct { type regexWrapper struct {
@@ -42,6 +43,7 @@ func W(rex *regexp.Regexp) Regex {
// --------------------------------------------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------------------------------------
// IsMatch reports whether the string s contains any match of the regular expression re.
func (w *regexWrapper) IsMatch(haystack string) bool { func (w *regexWrapper) IsMatch(haystack string) bool {
return w.rex.MatchString(haystack) return w.rex.MatchString(haystack)
} }
@@ -88,6 +90,11 @@ func (w *regexWrapper) GroupCount() int {
return len(w.subnames) - 1 return len(w.subnames) - 1
} }
// String returns the source text used to compile the regular expression.
func (w *regexWrapper) String() string {
return w.rex.String()
}
// --------------------------------------------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------------------------------------
func (m RegexMatch) FullMatch() RegexMatchGroup { func (m RegexMatch) FullMatch() RegexMatchGroup {

View File

@@ -8,7 +8,7 @@ import (
func TestGroupByNameOrEmpty1(t *testing.T) { func TestGroupByNameOrEmpty1(t *testing.T) {
regex1 := W(regexp.MustCompile("0(?P<group1>A+)B(?P<group2>C+)0")) regex1 := W(regexp.MustCompile(`0(?P<group1>A+)B(?P<group2>C+)0`))
match1, ok1 := regex1.MatchFirst("Hello 0AAAABCCC0 Bye.") match1, ok1 := regex1.MatchFirst("Hello 0AAAABCCC0 Bye.")
@@ -26,7 +26,7 @@ func TestGroupByNameOrEmpty1(t *testing.T) {
func TestGroupByNameOrEmpty2(t *testing.T) { func TestGroupByNameOrEmpty2(t *testing.T) {
regex1 := W(regexp.MustCompile("0(?P<group1>A+)B(?P<group2>C+)(?P<group3>C+)?0")) regex1 := W(regexp.MustCompile(`0(?P<group1>A+)B(?P<group2>C+)(?P<group3>C+)?0`))
match1, ok1 := regex1.MatchFirst("Hello 0AAAABCCC0 Bye.") match1, ok1 := regex1.MatchFirst("Hello 0AAAABCCC0 Bye.")

View File

@@ -69,7 +69,7 @@ func (t *RFC3339Time) UnmarshalText(data []byte) error {
} }
func (t *RFC3339Time) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { func (t *RFC3339Time) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bsontype.Null { if bt == bson.TypeNull {
// we can't set nil in UnmarshalBSONValue (so we use default(struct)) // we can't set nil in UnmarshalBSONValue (so we use default(struct))
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values // Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values
// https://stackoverflow.com/questions/75167597 // https://stackoverflow.com/questions/75167597
@@ -77,7 +77,7 @@ func (t *RFC3339Time) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
*t = RFC3339Time{} *t = RFC3339Time{}
return nil return nil
} }
if bt != bsontype.DateTime { if bt != bson.TypeDateTime {
return errors.New(fmt.Sprintf("cannot unmarshal %v into RFC3339Time", bt)) return errors.New(fmt.Sprintf("cannot unmarshal %v into RFC3339Time", bt))
} }
var tt time.Time var tt time.Time

View File

@@ -69,7 +69,7 @@ func (t *RFC3339NanoTime) UnmarshalText(data []byte) error {
} }
func (t *RFC3339NanoTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { func (t *RFC3339NanoTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bsontype.Null { if bt == bson.TypeNull {
// we can't set nil in UnmarshalBSONValue (so we use default(struct)) // we can't set nil in UnmarshalBSONValue (so we use default(struct))
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values // Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values
// https://stackoverflow.com/questions/75167597 // https://stackoverflow.com/questions/75167597
@@ -77,7 +77,7 @@ func (t *RFC3339NanoTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) erro
*t = RFC3339NanoTime{} *t = RFC3339NanoTime{}
return nil return nil
} }
if bt != bsontype.DateTime { if bt != bson.TypeDateTime {
return errors.New(fmt.Sprintf("cannot unmarshal %v into RFC3339NanoTime", bt)) return errors.New(fmt.Sprintf("cannot unmarshal %v into RFC3339NanoTime", bt))
} }
var tt time.Time var tt time.Time

View File

@@ -2,6 +2,7 @@ package rfctime
import ( import (
"encoding/json" "encoding/json"
"gogs.mikescher.com/BlackForestBytes/goext/timeext"
"gogs.mikescher.com/BlackForestBytes/goext/tst" "gogs.mikescher.com/BlackForestBytes/goext/tst"
"testing" "testing"
"time" "time"
@@ -13,7 +14,7 @@ func TestRoundtrip(t *testing.T) {
Value RFC3339NanoTime `json:"v"` Value RFC3339NanoTime `json:"v"`
} }
val1 := NewRFC3339Nano(time.Unix(0, 1675951556820915171)) val1 := NewRFC3339Nano(time.Unix(0, 1675951556820915171).In(timeext.TimezoneBerlin))
w1 := Wrap{val1} w1 := Wrap{val1}
jstr1, err := json.Marshal(w1) jstr1, err := json.Marshal(w1)

View File

@@ -39,7 +39,7 @@ func HashSqliteSchema(ctx context.Context, schemaStr string) (string, error) {
return HashSqliteDatabase(ctx, db) return HashSqliteDatabase(ctx, db)
} }
func HashSqliteDatabase(ctx context.Context, db DB) (string, error) { func HashSqliteDatabase(ctx context.Context, db Queryable) (string, error) {
ss, err := CreateSqliteDatabaseSchemaString(ctx, db) ss, err := CreateSqliteDatabaseSchemaString(ctx, db)
if err != nil { if err != nil {
return "", err return "", err
@@ -50,7 +50,7 @@ func HashSqliteDatabase(ctx context.Context, db DB) (string, error) {
return hex.EncodeToString(cs[:]), nil return hex.EncodeToString(cs[:]), nil
} }
func CreateSqliteDatabaseSchemaString(ctx context.Context, db DB) (string, error) { func CreateSqliteDatabaseSchemaString(ctx context.Context, db Queryable) (string, error) {
type colInfo struct { type colInfo struct {
Name string `db:"name"` Name string `db:"name"`

View File

@@ -7,24 +7,40 @@ import (
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
) )
type TxStatus string
const (
TxStatusInitial TxStatus = "INITIAL"
TxStatusActive TxStatus = "ACTIVE"
TxStatusComitted TxStatus = "COMMITTED"
TxStatusRollback TxStatus = "ROLLBACK"
)
type Tx interface { type Tx interface {
Rollback() error Rollback() error
Commit() error Commit() error
Status() TxStatus
Exec(ctx context.Context, sql string, prep PP) (sql.Result, error) Exec(ctx context.Context, sql string, prep PP) (sql.Result, error)
Query(ctx context.Context, sql string, prep PP) (*sqlx.Rows, error) Query(ctx context.Context, sql string, prep PP) (*sqlx.Rows, error)
} }
type transaction struct { type transaction struct {
tx *sqlx.Tx tx *sqlx.Tx
id uint16 id uint16
lstr []Listener lstr []Listener
status TxStatus
execCtr int
queryCtr int
} }
func NewTransaction(xtx *sqlx.Tx, txid uint16, lstr []Listener) Tx { func NewTransaction(xtx *sqlx.Tx, txid uint16, lstr []Listener) Tx {
return &transaction{ return &transaction{
tx: xtx, tx: xtx,
id: txid, id: txid,
lstr: lstr, lstr: lstr,
status: TxStatusInitial,
execCtr: 0,
queryCtr: 0,
} }
} }
@@ -38,6 +54,10 @@ func (tx *transaction) Rollback() error {
result := tx.tx.Rollback() result := tx.tx.Rollback()
if result == nil {
tx.status = TxStatusRollback
}
for _, v := range tx.lstr { for _, v := range tx.lstr {
v.PostTxRollback(tx.id, result) v.PostTxRollback(tx.id, result)
} }
@@ -55,6 +75,10 @@ func (tx *transaction) Commit() error {
result := tx.tx.Commit() result := tx.tx.Commit()
if result == nil {
tx.status = TxStatusComitted
}
for _, v := range tx.lstr { for _, v := range tx.lstr {
v.PostTxRollback(tx.id, result) v.PostTxRollback(tx.id, result)
} }
@@ -73,6 +97,10 @@ func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Re
res, err := tx.tx.NamedExecContext(ctx, sqlstr, prep) res, err := tx.tx.NamedExecContext(ctx, sqlstr, prep)
if tx.status == TxStatusInitial && err == nil {
tx.status = TxStatusActive
}
for _, v := range tx.lstr { for _, v := range tx.lstr {
v.PostExec(langext.Ptr(tx.id), origsql, sqlstr, prep) v.PostExec(langext.Ptr(tx.id), origsql, sqlstr, prep)
} }
@@ -94,6 +122,10 @@ func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx
rows, err := sqlx.NamedQueryContext(ctx, tx.tx, sqlstr, prep) rows, err := sqlx.NamedQueryContext(ctx, tx.tx, sqlstr, prep)
if tx.status == TxStatusInitial && err == nil {
tx.status = TxStatusActive
}
for _, v := range tx.lstr { for _, v := range tx.lstr {
v.PostQuery(langext.Ptr(tx.id), origsql, sqlstr, prep) v.PostQuery(langext.Ptr(tx.id), origsql, sqlstr, prep)
} }
@@ -103,3 +135,11 @@ func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx
} }
return rows, nil return rows, nil
} }
func (tx *transaction) Status() TxStatus {
return tx.status
}
func (tx *transaction) Traffic() (int, int) {
return tx.execCtr, tx.queryCtr
}
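Sketch (written as if inside the same package, matching the unqualified names above): after a successful Commit the status moves from INITIAL/ACTIVE to COMMITTED, so callers can inspect what happened to a transaction after the fact:

err := tx.Commit()
if err == nil {
	// the status was flipped by Commit itself
	_ = tx.Status() // TxStatusComitted (constant spelled as defined above)
}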

View File

@@ -71,12 +71,12 @@ func SupportsColors() bool {
} }
} }
var term256Regex = regexp.MustCompile("(?i)-256(color)?$") var term256Regex = regexp.MustCompile(`(?i)-256(color)?$`)
if term256Regex.MatchString(termenv) { if term256Regex.MatchString(termenv) {
return true return true
} }
var termBasicRegex = regexp.MustCompile("(?i)^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux") var termBasicRegex = regexp.MustCompile(`(?i)^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux`)
if termBasicRegex.MatchString(termenv) { if termBasicRegex.MatchString(termenv) {
return true return true

View File

@@ -1,6 +1,7 @@
package termext package termext
import ( import (
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"math/rand" "math/rand"
"testing" "testing"
) )
@@ -32,9 +33,3 @@ func TestColor(t *testing.T) {
tst.AssertEqual(t, CleanString(Gray("test")), "test") tst.AssertEqual(t, CleanString(Gray("test")), "test")
tst.AssertEqual(t, CleanString(White("test")), "test") tst.AssertEqual(t, CleanString(White("test")), "test")
} }
func assertEqual(t *testing.T, actual string, expected string) {
if actual != expected {
t.Errorf("values differ: Actual: '%v', Expected: '%v'", actual, expected)
}
}

timeext/calendarweek.go (new file)

@@ -0,0 +1,28 @@
package timeext
import "time"
func WeekStart(year, week int) time.Time {
// https://stackoverflow.com/a/52303730/1761622
// Start from the middle of the year:
t := time.Date(year, 7, 1, 0, 0, 0, 0, time.UTC)
// Roll back to Monday:
if wd := t.Weekday(); wd == time.Sunday {
t = t.AddDate(0, 0, -6)
} else {
t = t.AddDate(0, 0, -int(wd)+1)
}
// Difference in weeks:
_, w := t.ISOWeek()
t = t.AddDate(0, 0, (week-w)*7)
return t
}
func WeekEnd(year, week int) time.Time {
return WeekStart(year, week).AddDate(0, 0, 7).Add(time.Duration(-1))
}

View File

@@ -0,0 +1,25 @@
package timeext
import (
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"testing"
"time"
)
func TestWeekStart(t *testing.T) {
tst.AssertEqual(t, WeekStart(2018, 1).Format(time.RFC3339Nano), "2018-01-01T00:00:00Z")
tst.AssertEqual(t, WeekStart(2018, 2).Format(time.RFC3339Nano), "2018-01-08T00:00:00Z")
tst.AssertEqual(t, WeekStart(2019, 1).Format(time.RFC3339Nano), "2018-12-31T00:00:00Z")
tst.AssertEqual(t, WeekStart(2019, 2).Format(time.RFC3339Nano), "2019-01-07T00:00:00Z")
}
func TestWeekEnd(t *testing.T) {
tst.AssertEqual(t, WeekEnd(2018, 1).Format(time.RFC3339Nano), "2018-01-07T23:59:59.999999999Z")
tst.AssertEqual(t, WeekEnd(2018, 2).Format(time.RFC3339Nano), "2018-01-14T23:59:59.999999999Z")
tst.AssertEqual(t, WeekEnd(2019, 1).Format(time.RFC3339Nano), "2019-01-06T23:59:59.999999999Z")
tst.AssertEqual(t, WeekEnd(2019, 2).Format(time.RFC3339Nano), "2019-01-13T23:59:59.999999999Z")
}

View File

@@ -7,56 +7,56 @@ import (
func TestParseDurationShortString(t *testing.T) { func TestParseDurationShortString(t *testing.T) {
tst.AssertPDSSEqual(t, FromSeconds(1), "1s") assertPDSSEqual(t, FromSeconds(1), "1s")
tst.AssertPDSSEqual(t, FromSeconds(1), "1sec") assertPDSSEqual(t, FromSeconds(1), "1sec")
- tst.AssertPDSSEqual(t, FromSeconds(1), "1second")
- tst.AssertPDSSEqual(t, FromSeconds(1), "1seconds")
- tst.AssertPDSSEqual(t, FromSeconds(100), "100second")
- tst.AssertPDSSEqual(t, FromSeconds(100), "100seconds")
- tst.AssertPDSSEqual(t, FromSeconds(1883639.77), "1883639.77second")
- tst.AssertPDSSEqual(t, FromSeconds(1883639.77), "1883639.77seconds")
- tst.AssertPDSSEqual(t, FromSeconds(50), "50s")
- tst.AssertPDSSEqual(t, FromSeconds(50), "50sec")
- tst.AssertPDSSEqual(t, FromSeconds(1), "1second")
- tst.AssertPDSSEqual(t, FromSeconds(50), "50seconds")
- tst.AssertPDSSEqual(t, FromMinutes(10), "10m")
- tst.AssertPDSSEqual(t, FromMinutes(10), "10min")
- tst.AssertPDSSEqual(t, FromMinutes(1), "1minute")
- tst.AssertPDSSEqual(t, FromMinutes(10), "10minutes")
- tst.AssertPDSSEqual(t, FromMinutes(10.5), "10.5minutes")
- tst.AssertPDSSEqual(t, FromMilliseconds(100), "100ms")
- tst.AssertPDSSEqual(t, FromMilliseconds(100), "100milliseconds")
- tst.AssertPDSSEqual(t, FromMilliseconds(100), "100millisecond")
- tst.AssertPDSSEqual(t, FromNanoseconds(99235), "99235ns")
- tst.AssertPDSSEqual(t, FromNanoseconds(99235), "99235nanoseconds")
- tst.AssertPDSSEqual(t, FromNanoseconds(99235), "99235nanosecond")
- tst.AssertPDSSEqual(t, FromMicroseconds(99235), "99235us")
- tst.AssertPDSSEqual(t, FromMicroseconds(99235), "99235microseconds")
- tst.AssertPDSSEqual(t, FromMicroseconds(99235), "99235microsecond")
- tst.AssertPDSSEqual(t, FromHours(1), "1h")
- tst.AssertPDSSEqual(t, FromHours(1), "1hour")
- tst.AssertPDSSEqual(t, FromHours(2), "2hours")
- tst.AssertPDSSEqual(t, FromDays(1), "1d")
- tst.AssertPDSSEqual(t, FromDays(1), "1day")
- tst.AssertPDSSEqual(t, FromDays(10), "10days")
- tst.AssertPDSSEqual(t, FromDays(1), "1days")
- tst.AssertPDSSEqual(t, FromDays(10), "10day")
- tst.AssertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d10m")
- tst.AssertPDSSEqual(t, FromDays(1)+FromMinutes(10)+FromSeconds(200), "1d10m200sec")
- tst.AssertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d:10m")
- tst.AssertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d 10m")
- tst.AssertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d,10m")
- tst.AssertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d, 10m")
- tst.AssertPDSSEqual(t, FromDays(1)+FromSeconds(1000), "1d 1000seconds")
- tst.AssertPDSSEqual(t, FromDays(1), "86400s")
+ assertPDSSEqual(t, FromSeconds(1), "1second")
+ assertPDSSEqual(t, FromSeconds(1), "1seconds")
+ assertPDSSEqual(t, FromSeconds(100), "100second")
+ assertPDSSEqual(t, FromSeconds(100), "100seconds")
+ assertPDSSEqual(t, FromSeconds(1883639.77), "1883639.77second")
+ assertPDSSEqual(t, FromSeconds(1883639.77), "1883639.77seconds")
+ assertPDSSEqual(t, FromSeconds(50), "50s")
+ assertPDSSEqual(t, FromSeconds(50), "50sec")
+ assertPDSSEqual(t, FromSeconds(1), "1second")
+ assertPDSSEqual(t, FromSeconds(50), "50seconds")
+ assertPDSSEqual(t, FromMinutes(10), "10m")
+ assertPDSSEqual(t, FromMinutes(10), "10min")
+ assertPDSSEqual(t, FromMinutes(1), "1minute")
+ assertPDSSEqual(t, FromMinutes(10), "10minutes")
+ assertPDSSEqual(t, FromMinutes(10.5), "10.5minutes")
+ assertPDSSEqual(t, FromMilliseconds(100), "100ms")
+ assertPDSSEqual(t, FromMilliseconds(100), "100milliseconds")
+ assertPDSSEqual(t, FromMilliseconds(100), "100millisecond")
+ assertPDSSEqual(t, FromNanoseconds(99235), "99235ns")
+ assertPDSSEqual(t, FromNanoseconds(99235), "99235nanoseconds")
+ assertPDSSEqual(t, FromNanoseconds(99235), "99235nanosecond")
+ assertPDSSEqual(t, FromMicroseconds(99235), "99235us")
+ assertPDSSEqual(t, FromMicroseconds(99235), "99235microseconds")
+ assertPDSSEqual(t, FromMicroseconds(99235), "99235microsecond")
+ assertPDSSEqual(t, FromHours(1), "1h")
+ assertPDSSEqual(t, FromHours(1), "1hour")
+ assertPDSSEqual(t, FromHours(2), "2hours")
+ assertPDSSEqual(t, FromDays(1), "1d")
+ assertPDSSEqual(t, FromDays(1), "1day")
+ assertPDSSEqual(t, FromDays(10), "10days")
+ assertPDSSEqual(t, FromDays(1), "1days")
+ assertPDSSEqual(t, FromDays(10), "10day")
+ assertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d10m")
+ assertPDSSEqual(t, FromDays(1)+FromMinutes(10)+FromSeconds(200), "1d10m200sec")
+ assertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d:10m")
+ assertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d 10m")
+ assertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d,10m")
+ assertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d, 10m")
+ assertPDSSEqual(t, FromDays(1)+FromSeconds(1000), "1d 1000seconds")
+ assertPDSSEqual(t, FromDays(1), "86400s")
}
func assertPDSSEqual(t *testing.T, expected time.Duration, fmt string) {


@@ -22,6 +22,17 @@ func TimeToDatePart(t time.Time, tz *time.Location) time.Time {
return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
}
// TimeToDayStart returns a timestamp at the start of the day which contains t (= 00:00:00)
func TimeToDayStart(t time.Time, tz *time.Location) time.Time {
t = t.In(tz)
return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
}
// TimeToDayEnd returns a timestamp at the end of the day which contains t (= 23:59:59)
func TimeToDayEnd(t time.Time, tz *time.Location) time.Time {
return TimeToDayStart(t, tz).AddDate(0, 0, 1).Add(-1)
}
// TimeToWeekStart returns a timestamp at the start of the week which contains t (= Monday 00:00:00)
func TimeToWeekStart(t time.Time, tz *time.Location) time.Time {
t = TimeToDatePart(t, tz)
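
A minimal usage sketch for the new day helpers, complementing TimeToDatePart and TimeToWeekStart above (the timeext import path is an assumption based on the goext module layout):

package main

import (
	"fmt"
	"time"

	"gogs.mikescher.com/BlackForestBytes/goext/timeext" // assumed import path of the package shown above
)

func main() {
	tz, _ := time.LoadLocation("Europe/Berlin")
	now := time.Now()

	dayStart := timeext.TimeToDayStart(now, tz) // 00:00:00 of the day containing now, in tz
	dayEnd := timeext.TimeToDayEnd(now, tz)     // one nanosecond before the next day starts

	fmt.Println(dayStart, dayEnd)
}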


@@ -2,23 +2,59 @@ package tst
import (
"encoding/hex"
"reflect"
"runtime/debug"
"testing"
)
func AssertEqual[T comparable](t *testing.T, actual T, expected T) {
t.Helper()
if actual != expected {
t.Errorf("values differ: Actual: '%v', Expected: '%v'", actual, expected)
}
}
func AssertNotEqual[T comparable](t *testing.T, actual T, expected T) {
t.Helper()
if actual == expected {
t.Errorf("values do not differ: Actual: '%v', Expected: '%v'", actual, expected)
}
}
func AssertDeepEqual[T any](t *testing.T, actual T, expected T) {
t.Helper()
if !reflect.DeepEqual(actual, expected) {
t.Errorf("values differ: Actual: '%v', Expected: '%v'", actual, expected)
}
}
func AssertSetDeepEqual[T any](t *testing.T, actual []T, expected []T) {
t.Helper()
if len(actual) != len(expected) {
t.Errorf("values differ in length: Actual (n=%d): '%v', Expected (n=%d): '%v'", len(actual), actual, len(expected), expected)
}
for _, a := range expected {
found := false
for _, b := range actual {
found = found || reflect.DeepEqual(a, b)
}
if !found {
t.Errorf("values differ: Element '%v' not found. Actual: '%v', Expected: '%v'", a, actual, expected)
return
}
}
}
func AssertNotDeepEqual[T any](t *testing.T, actual T, expected T) {
t.Helper()
if !reflect.DeepEqual(actual, expected) {
t.Errorf("values do not differ: Actual: '%v', Expected: '%v'", actual, expected)
}
}
func AssertDeRefEqual[T comparable](t *testing.T, actual *T, expected T) {
t.Helper()
if actual == nil {
t.Errorf("values differ: Actual: NIL, Expected: '%v'", expected)
}
@@ -28,6 +64,7 @@ func AssertDeRefEqual[T comparable](t *testing.T, actual *T, expected T) {
}
func AssertPtrEqual[T comparable](t *testing.T, actual *T, expected *T) {
t.Helper()
if actual == nil && expected == nil {
return
}
@@ -47,6 +84,7 @@ func AssertPtrEqual[T comparable](t *testing.T, actual *T, expected *T) {
}
func AssertHexEqual(t *testing.T, expected string, actual []byte) {
t.Helper()
actualStr := hex.EncodeToString(actual)
if actualStr != expected {
t.Errorf("values differ: Actual: '%v', Expected: '%v'", actualStr, expected)
@@ -54,18 +92,21 @@ func AssertHexEqual(t *testing.T, expected string, actual []byte) {
}
func AssertTrue(t *testing.T, value bool) {
t.Helper()
if !value {
t.Error("value should be true\n" + string(debug.Stack()))
}
}
func AssertFalse(t *testing.T, value bool) {
t.Helper()
if value {
t.Error("value should be false\n" + string(debug.Stack()))
}
}
func AssertNoErr(t *testing.T, anerr error) {
t.Helper()
if anerr != nil {
t.Error("Function returned an error: " + anerr.Error() + "\n" + string(debug.Stack()))
}
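
A short usage sketch for the new deep-equality assertions (a hypothetical test; the tst import path is assumed from the goext module layout). AssertDeepEqual compares via reflect.DeepEqual, AssertSetDeepEqual additionally ignores element order:

package example_test

import (
	"testing"

	"gogs.mikescher.com/BlackForestBytes/goext/tst" // assumed import path of the tst package above
)

type point struct{ X, Y int }

func TestDeepAssertions(t *testing.T) {
	// deep equality also works for types that are not 'comparable' (slices, maps, ...)
	tst.AssertDeepEqual(t, []int{1, 2, 3}, []int{1, 2, 3})

	// set equality: both sides must contain the same elements, order is irrelevant
	tst.AssertSetDeepEqual(t, []point{{1, 2}, {3, 4}}, []point{{3, 4}, {1, 2}})
}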


@@ -5,6 +5,7 @@ import (
"go.mongodb.org/mongo-driver/bson/bsontype" "go.mongodb.org/mongo-driver/bson/bsontype"
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect" "reflect"
) )
@@ -66,21 +67,25 @@ func (c *Coll[TData]) Indexes() mongo.IndexView {
}
func (c *Coll[TData]) Drop(ctx context.Context) error {
- return c.coll.Drop(ctx)
+ err := c.coll.Drop(ctx)
if err != nil {
return exerr.Wrap(err, "failed to drop collection").Str("collection", c.Name()).Build()
}
return nil
}
func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) {
valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary)
if err != nil {
- return ct.CursorToken{}, err
+ return ct.CursorToken{}, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build()
}
valueSeconary := ""
if fieldSecondary != nil && dirSecondary != nil {
valueSeconary, err = c.getFieldValueAsTokenString(lastEntity, *fieldSecondary)
if err != nil {
- return ct.CursorToken{}, err
+ return ct.CursorToken{}, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build()
}
}
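
The pattern introduced here (exerr.Wrap(err, ...) plus key/value context plus Build()) is what the rest of this changeset applies to all wmo query helpers. A hedged sketch of the same pattern one level further up the call stack; the function names are hypothetical and only the exerr calls shown in the diff are assumed to exist:

package example

import (
	"context"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr" // import path as used in the diff above
)

// deleteUserRecord stands in for any lower-level call that already returns an exerr-built error.
func deleteUserRecord(ctx context.Context, id string) error { return nil }

// DeleteUser wraps the inner error, attaches its own context fields and builds the final error.
func DeleteUser(ctx context.Context, id string) error {
	if err := deleteUserRecord(ctx, id); err != nil {
		return exerr.Wrap(err, "failed to delete user").Str("id", id).Build()
	}
	return nil
}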


@@ -2,12 +2,17 @@ package wmo
import (
"context"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)
func (c *Coll[TData]) decodeSingle(ctx context.Context, dec Decodable) (TData, error) {
if c.customDecoder != nil {
- return (*c.customDecoder)(ctx, dec)
+ res, err := (*c.customDecoder)(ctx, dec)
if err != nil {
return *new(TData), exerr.Wrap(err, "failed to decode single entity with custom-decoder").Type("decoder", *c.customDecoder).Build()
}
return res, nil
} else {
@@ -15,7 +20,7 @@ func (c *Coll[TData]) decodeSingle(ctx context.Context, dec Decodable) (TData, e
err := dec.Decode(&res)
if err != nil {
- return *new(TData), err
+ return *new(TData), exerr.Wrap(err, "failed to decode single entity").Type("target-type", res).Build()
}
return res, nil
@@ -31,7 +36,7 @@ func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData
for cursor.Next(ctx) {
entry, err := (*c.customDecoder)(ctx, cursor)
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "failed to decode entity with custom-decoder").Type("decoder", *c.customDecoder).Build()
}
res = append(res, entry)
}
@@ -44,7 +49,7 @@ func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData
err := cursor.All(ctx, &res)
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "failed to batch-decode entity").Type("target-type", res).Build()
}
return res, nil


@@ -1,9 +1,9 @@
package wmo
import (
"errors"
"go.mongodb.org/mongo-driver/bson"
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)
func CreatePagination[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) {
@@ -13,7 +13,7 @@ func CreatePagination[TData any](coll *Coll[TData], token ct.CursorToken, fieldP
valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary)
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build()
}
if sortPrimary == ct.SortASC {
@@ -30,7 +30,7 @@ func CreatePagination[TData any](coll *Coll[TData], token ct.CursorToken, fieldP
valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary)
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build()
}
if *sortSecondary == ct.SortASC {
@@ -73,7 +73,7 @@ func CreatePagination[TData any](coll *Coll[TData], token ct.CursorToken, fieldP
} else {
- return nil, errors.New("unknown ct mode: " + string(token.Mode))
+ return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build()
}


@@ -2,20 +2,20 @@ package wmo
import (
"context"
"errors"
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)
func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) {
cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
}
res, err := c.decodeAll(ctx, cursor)
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "failed to decode values").Build()
}
return res, nil
@@ -24,13 +24,13 @@ func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, op
func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) {
cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
}
if cursor.Next(ctx) {
v, err := c.decodeSingle(ctx, cursor)
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "failed to decode single value").Build()
}
return &v, nil
}
@@ -41,16 +41,16 @@ func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeli
func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) {
cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
if err != nil {
- return *new(TData), err
+ return *new(TData), exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
}
if cursor.Next(ctx) {
v, err := c.decodeSingle(ctx, cursor)
if err != nil {
- return *new(TData), err
+ return *new(TData), exerr.Wrap(err, "failed to decode single value").Build()
}
return v, nil
}
- return *new(TData), errors.New("no document in result")
+ return *new(TData), exerr.Wrap(mongo.ErrNoDocuments, "no document in result").Build()
}


@@ -4,12 +4,13 @@ import (
"context" "context"
"go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
) )
func (c *Coll[TData]) DeleteOneByID(ctx context.Context, id EntityID) error { func (c *Coll[TData]) DeleteOneByID(ctx context.Context, id EntityID) error {
_, err := c.coll.DeleteOne(ctx, bson.M{"_id": id}) _, err := c.coll.DeleteOne(ctx, bson.M{"_id": id})
if err != nil { if err != nil {
return err return exerr.Wrap(err, "mongo-query[delete-one-by-id] failed").Id("id", id).Str("collection", c.Name()).Build()
} }
return nil return nil
@@ -18,7 +19,7 @@ func (c *Coll[TData]) DeleteOneByID(ctx context.Context, id EntityID) error {
func (c *Coll[TData]) DeleteOne(ctx context.Context, filterQuery bson.M) error {
_, err := c.coll.DeleteOne(ctx, filterQuery)
if err != nil {
- return err
+ return exerr.Wrap(err, "mongo-query[delete-one] failed").Any("filterQuery", filterQuery).Str("collection", c.Name()).Build()
}
return nil
@@ -27,7 +28,7 @@ func (c *Coll[TData]) DeleteOne(ctx context.Context, filterQuery bson.M) error {
func (c *Coll[TData]) DeleteMany(ctx context.Context, filterQuery bson.M) (*mongo.DeleteResult, error) {
res, err := c.coll.DeleteMany(ctx, filterQuery)
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "mongo-query[delete-many] failed").Any("filterQuery", filterQuery).Str("collection", c.Name()).Build()
}
return res, nil


@@ -2,13 +2,21 @@ package wmo
import (
"context"
"errors"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)
func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) {
mongoRes := c.coll.FindOne(ctx, filter)
if err := mongoRes.Err(); err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").
Str("collection", c.Name()).
Any("filter", filter).
Build()
}
return c.decodeSingle(ctx, mongoRes)
}
@@ -17,11 +25,11 @@ func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, er
mongoRes := c.coll.FindOne(ctx, filter)
res, err := c.decodeSingle(ctx, mongoRes)
- if err == mongo.ErrNoDocuments {
+ if errors.Is(err, mongo.ErrNoDocuments) {
return nil, nil
}
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Any("filter", filter).Str("collection", c.Name()).Build()
}
return &res, nil
@@ -29,6 +37,12 @@ func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, er
func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) {
mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id})
if err := mongoRes.Err(); err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one-by-id] failed").
Str("collection", c.Name()).
Id("id", id).
Build()
}
return c.decodeSingle(ctx, mongoRes)
}
@@ -37,11 +51,11 @@ func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData,
mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id})
res, err := c.decodeSingle(ctx, mongoRes)
- if err == mongo.ErrNoDocuments {
+ if errors.Is(err, mongo.ErrNoDocuments) {
return nil, nil
}
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "mongo-query[find-one-opt-by-id] failed").Id("id", id).Str("collection", c.Name()).Build()
}
return &res, nil
@@ -50,12 +64,12 @@ func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData,
func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) {
cursor, err := c.coll.Find(ctx, filter, opts...)
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Any("filter", filter).Any("opts", opts).Str("collection", c.Name()).Build()
}
res, err := c.decodeAll(ctx, cursor)
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "failed to decode values").Build()
}
return res, nil
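
The switch from err == mongo.ErrNoDocuments to errors.Is(err, mongo.ErrNoDocuments) matters once errors get wrapped: == only matches the exact sentinel value, while errors.Is also matches a sentinel hidden inside a wrap chain (provided the wrapper implements Unwrap). A self-contained illustration using only the standard library:

package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("not found")

func main() {
	wrapped := fmt.Errorf("loading document failed: %w", errNotFound)

	fmt.Println(wrapped == errNotFound)          // false: the wrapped error is a different value
	fmt.Println(errors.Is(wrapped, errNotFound)) // true: errors.Is walks the wrap chain
}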


@@ -4,13 +4,14 @@ import (
"context" "context"
"go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
) )
func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, error) { func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, error) {
insRes, err := c.coll.InsertOne(ctx, valueIn) insRes, err := c.coll.InsertOne(ctx, valueIn)
if err != nil { if err != nil {
return *new(TData), err return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build()
} }
mongoRes := c.coll.FindOne(ctx, bson.M{"_id": insRes.InsertedID}) mongoRes := c.coll.FindOne(ctx, bson.M{"_id": insRes.InsertedID})
@@ -19,5 +20,10 @@ func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, erro
}
func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) {
- return c.coll.InsertMany(ctx, langext.ArrayToInterface(valueIn))
+ insRes, err := c.coll.InsertMany(ctx, langext.ArrayToInterface(valueIn))
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[insert-many] failed").Int("len(valueIn)", len(valueIn)).Str("collection", c.Name()).Build()
}
return insRes, nil
}


@@ -5,6 +5,7 @@ import (
"go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
) )
func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) { func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) {
@@ -35,21 +36,31 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
paginationPipeline, err := CreatePagination(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
if err != nil {
- return nil, ct.CursorToken{}, err
+ return nil, ct.CursorToken{}, exerr.
Wrap(err, "failed to create pagination").
WithType(exerr.TypeCursorTokenDecode).
Str("collection", c.Name()).
Any("inTok", inTok).
Any("sortPrimary", sortPrimary).
Any("sortDirPrimary", sortDirPrimary).
Any("sortSecondary", sortSecondary).
Any("sortDirSecondary", sortDirSecondary).
Any("pageSize", pageSize).
Build()
}
pipeline = append(pipeline, paginationPipeline...)
cursor, err := c.coll.Aggregate(ctx, pipeline)
if err != nil {
- return nil, ct.CursorToken{}, err
+ return nil, ct.CursorToken{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
// fast branch
if pageSize == nil {
entries, err := c.decodeAll(ctx, cursor)
if err != nil {
- return nil, ct.CursorToken{}, err
+ return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to all-decode entities").Build()
}
return entries, ct.End(), nil
}
@@ -59,7 +70,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
var entry TData
entry, err = c.decodeSingle(ctx, cursor)
if err != nil {
- return nil, ct.CursorToken{}, err
+ return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to decode entity").Build()
}
entities = append(entities, entry)
}
@@ -74,7 +85,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize)
if err != nil {
- return nil, ct.CursorToken{}, err
+ return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to create (out)-token").Build()
}
return entities, nextToken, nil
@@ -91,17 +102,31 @@ func (c *Coll[TData]) Count(ctx context.Context, filter ct.Filter) (int64, error
cursor, err := c.coll.Aggregate(ctx, pipeline)
if err != nil {
- return 0, err
+ return 0, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
if cursor.Next(ctx) {
v := countRes{}
err = cursor.Decode(&v)
if err != nil {
- return 0, err
+ return 0, exerr.Wrap(err, "failed to decode entity").Build()
}
return v.Count, nil
}
return 0, nil
}
func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, int64, error) {
// NOTE: Possible optimization: Cache count in CursorToken, then fetch count only on first page.
count, err := c.Count(ctx, filter)
if err != nil {
return nil, ct.CursorToken{}, 0, err
}
data, token, err := c.List(ctx, filter, pageSize, inTok)
if err != nil {
return nil, ct.CursorToken{}, 0, err
}
return data, token, count, nil
}
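
A usage sketch for the new ListWithCount helper, e.g. for an endpoint that returns one page of results plus the total number of matching documents. The wmo import path and the generic wrapper are assumptions; only ListWithCount itself is taken from the change above:

package example

import (
	"context"

	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
	"gogs.mikescher.com/BlackForestBytes/goext/wmo" // assumed import path of the wmo package above
)

// listPage is a hypothetical helper: it returns one page of data, the cursor token
// for the next page and the total count of documents matching the filter.
func listPage[TData any](ctx context.Context, coll *wmo.Coll[TData], filter ct.Filter, pageSize int, tok ct.CursorToken) ([]TData, ct.CursorToken, int64, error) {
	return coll.ListWithCount(ctx, filter, &pageSize, tok)
}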


@@ -5,10 +5,18 @@ import (
"go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options" "go.mongodb.org/mongo-driver/mongo/options"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
) )
func (c *Coll[TData]) FindOneAndUpdate(ctx context.Context, filterQuery bson.M, updateQuery bson.M) (TData, error) { func (c *Coll[TData]) FindOneAndUpdate(ctx context.Context, filterQuery bson.M, updateQuery bson.M) (TData, error) {
mongoRes := c.coll.FindOneAndUpdate(ctx, filterQuery, updateQuery, options.FindOneAndUpdate().SetReturnDocument(options.After)) mongoRes := c.coll.FindOneAndUpdate(ctx, filterQuery, updateQuery, options.FindOneAndUpdate().SetReturnDocument(options.After))
if err := mongoRes.Err(); err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one-and-update] failed").
Str("collection", c.Name()).
Any("filterQuery", filterQuery).
Any("updateQuery", updateQuery).
Build()
}
return c.decodeSingle(ctx, mongoRes)
}
@@ -16,7 +24,11 @@ func (c *Coll[TData]) FindOneAndUpdate(ctx context.Context, filterQuery bson.M,
func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQuery bson.M) error {
_, err := c.coll.UpdateOne(ctx, filterQuery, updateQuery)
if err != nil {
- return err
+ return exerr.Wrap(err, "mongo-query[update-one] failed").
Str("collection", c.Name()).
Any("filterQuery", filterQuery).
Any("updateQuery", updateQuery).
Build()
}
return nil
@@ -25,7 +37,11 @@ func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQ
func (c *Coll[TData]) UpdateOneByID(ctx context.Context, id EntityID, updateQuery bson.M) error {
_, err := c.coll.UpdateOne(ctx, bson.M{"_id": id}, updateQuery)
if err != nil {
- return err
+ return exerr.Wrap(err, "mongo-query[update-one-by-id] failed").
Str("collection", c.Name()).
Id("id", id).
Any("updateQuery", updateQuery).
Build()
}
return nil
@@ -34,7 +50,11 @@ func (c *Coll[TData]) UpdateOneByID(ctx context.Context, id EntityID, updateQuer
func (c *Coll[TData]) UpdateMany(ctx context.Context, filterQuery bson.M, updateQuery bson.M) (*mongo.UpdateResult, error) {
res, err := c.coll.UpdateMany(ctx, filterQuery, updateQuery)
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "mongo-query[update-many] failed").
Str("collection", c.Name()).
Any("filterQuery", filterQuery).
Any("updateQuery", updateQuery).
Build()
}
return res, nil
@@ -43,7 +63,10 @@ func (c *Coll[TData]) UpdateMany(ctx context.Context, filterQuery bson.M, update
func (c *Coll[TData]) ReplaceOne(ctx context.Context, filterQuery bson.M, value TData) error {
_, err := c.coll.UpdateOne(ctx, filterQuery, bson.M{"$set": value})
if err != nil {
- return err
+ return exerr.Wrap(err, "mongo-query[replace-one] failed").
Str("collection", c.Name()).
Any("filterQuery", filterQuery).
Build()
}
return nil
@@ -51,6 +74,12 @@ func (c *Coll[TData]) ReplaceOne(ctx context.Context, filterQuery bson.M, value
func (c *Coll[TData]) FindOneAndReplace(ctx context.Context, filterQuery bson.M, value TData) (TData, error) {
mongoRes := c.coll.FindOneAndUpdate(ctx, filterQuery, bson.M{"$set": value}, options.FindOneAndUpdate().SetReturnDocument(options.After))
if err := mongoRes.Err(); err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one-and-update] failed").
Str("collection", c.Name()).
Any("filterQuery", filterQuery).
Build()
}
return c.decodeSingle(ctx, mongoRes)
}


@@ -1,7 +1,7 @@
package wmo
import (
- "errors"
+ "gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/reflectext"
"reflect"
@@ -25,7 +25,7 @@ func (c *Coll[TData]) EnsureInitializedReflection(v TData) {
m := make(map[string]fullTypeRef)
- c.initFields("", rval, m, make([]int, 0))
+ c.initFields("", rval.Type(), m, make([]int, 0), make([]reflect.Type, 0))
c.implDataTypeMap[rval.Type()] = m
}
@@ -50,20 +50,16 @@ func (c *Coll[TData]) init() {
c.implDataTypeMap = make(map[reflect.Type]map[string]fullTypeRef)
v := reflect.ValueOf(example)
- c.initFields("", v, c.dataTypeMap, make([]int, 0))
+ c.initFields("", v.Type(), c.dataTypeMap, make([]int, 0), make([]reflect.Type, 0))
}
}
- func (c *Coll[TData]) initFields(prefix string, rval reflect.Value, m map[string]fullTypeRef, idxarr []int) {
+ func (c *Coll[TData]) initFields(prefix string, rtyp reflect.Type, m map[string]fullTypeRef, idxarr []int, typesInPath []reflect.Type) {
rtyp := rval.Type()
for i := 0; i < rtyp.NumField(); i++ {
rsfield := rtyp.Field(i)
rvfield := rval.Field(i)
if !rsfield.IsExported() {
continue
@@ -91,21 +87,21 @@ func (c *Coll[TData]) initFields(prefix string, rval reflect.Value, m map[string
newIdxArr := langext.ArrCopy(idxarr)
newIdxArr = append(newIdxArr, i)
- if langext.InArray("inline", bsontags) && rvfield.Kind() == reflect.Struct {
+ if langext.InArray("inline", bsontags) && rsfield.Type.Kind() == reflect.Struct {
// pass-through field
- c.initFields(prefix, rvfield, m, newIdxArr)
+ c.initFields(prefix, rsfield.Type, m, newIdxArr, typesInPath)
} else {
- if rvfield.Type().Kind() == reflect.Pointer {
+ if rsfield.Type.Kind() == reflect.Pointer {
m[fullKey] = fullTypeRef{
IsPointer: true,
- RealType: rvfield.Type(),
+ RealType: rsfield.Type,
- Kind: rvfield.Type().Elem().Kind(),
+ Kind: rsfield.Type.Elem().Kind(),
- Type: rvfield.Type().Elem(),
+ Type: rsfield.Type.Elem(),
- UnderlyingType: reflectext.Underlying(rvfield.Type().Elem()),
+ UnderlyingType: reflectext.Underlying(rsfield.Type.Elem()),
Name: rsfield.Name,
Index: newIdxArr,
}
@@ -114,20 +110,37 @@ func (c *Coll[TData]) initFields(prefix string, rval reflect.Value, m map[string
m[fullKey] = fullTypeRef{
IsPointer: false,
- RealType: rvfield.Type(),
+ RealType: rsfield.Type,
- Kind: rvfield.Type().Kind(),
+ Kind: rsfield.Type.Kind(),
- Type: rvfield.Type(),
+ Type: rsfield.Type,
- UnderlyingType: reflectext.Underlying(rvfield.Type()),
+ UnderlyingType: reflectext.Underlying(rsfield.Type),
Name: rsfield.Name,
Index: newIdxArr,
}
}
- if rvfield.Kind() == reflect.Struct {
+ if rsfield.Type.Kind() == reflect.Struct {
- c.initFields(fullKey+".", rvfield, m, newIdxArr)
+ c.initFields(fullKey+".", rsfield.Type, m, newIdxArr, typesInPath)
}
if rsfield.Type.Kind() == reflect.Pointer && rsfield.Type.Elem().Kind() == reflect.Struct {
innerType := rsfield.Type.Elem()
// check if there is recursion
recursion := false
for _, typ := range typesInPath {
recursion = recursion || (typ == innerType)
}
if !recursion {
// Remember the struct types already visited on this path (plus the current one) before following the pointer, to prevent endless recursion
newTypesInPath := make([]reflect.Type, len(typesInPath))
copy(newTypesInPath, typesInPath)
newTypesInPath = append(newTypesInPath, rtyp)
c.initFields(fullKey+".", innerType, m, newIdxArr, newTypesInPath)
}
}
}
}
@@ -138,7 +151,7 @@ func (c *Coll[TData]) getTokenValueAsMongoType(value string, fieldName string) (
fref, err := c.getFieldType(fieldName)
if err != nil {
- return nil, err
+ return nil, exerr.Wrap(err, "failed to get-field-type").Str("fieldName", fieldName).Build()
}
pss := reflectext.PrimitiveStringSerializer{}
@@ -151,7 +164,7 @@ func (c *Coll[TData]) getFieldValueAsTokenString(entity TData, fieldName string)
realValue, err := c.getFieldValue(entity, fieldName)
if err != nil {
- return "", err
+ return "", exerr.Wrap(err, "failed to get-field-value").Str("fieldName", fieldName).Build()
}
pss := reflectext.PrimitiveStringSerializer{}
@@ -169,14 +182,14 @@ func (c *Coll[TData]) getFieldType(fieldName string) (fullTypeRef, error) {
}
}
- return fullTypeRef{}, errors.New("unknown field: '" + fieldName + "' (in any impl)")
+ return fullTypeRef{}, exerr.New(exerr.TypeMongoReflection, "unknown field: '"+fieldName+"' (in any impl)").Str("fieldName", fieldName).Build()
} else {
if r, ok := c.dataTypeMap[fieldName]; ok {
return r, nil
} else {
- return fullTypeRef{}, errors.New("unknown field: '" + fieldName + "'")
+ return fullTypeRef{}, exerr.New(exerr.TypeMongoReflection, "unknown field: '"+fieldName+"'").Str("fieldName", fieldName).Build()
}
}
@@ -196,10 +209,10 @@ func (c *Coll[TData]) getFieldValue(data TData, fieldName string) (any, error) {
rval := reflect.ValueOf(data)
return rval.FieldByIndex(fref.Index).Interface(), nil
} else {
- return nil, errors.New("unknown bson field '" + fieldName + "' in type '" + rval.Type().String() + "'")
+ return nil, exerr.New(exerr.TypeMongoReflection, "unknown bson field '"+fieldName+"' in type '"+rval.Type().String()+"'").Str("fieldName", fieldName).Type("rval", rval).Build()
}
} else {
- return nil, errors.New("unknown TData type: '" + rval.Type().String() + "'")
+ return nil, exerr.New(exerr.TypeMongoReflection, "unknown TData type: '"+rval.Type().String()+"'").Type("rval", rval).Build()
}
} else {
@@ -208,7 +221,7 @@ func (c *Coll[TData]) getFieldValue(data TData, fieldName string) (any, error) {
rval := reflect.ValueOf(data)
return rval.FieldByIndex(fref.Index).Interface(), nil
} else {
- return nil, errors.New("unknown bson field '" + fieldName + "'")
+ return nil, exerr.New(exerr.TypeMongoReflection, "unknown bson field '"+fieldName+"'").Str("fieldName", fieldName).Build()
}
}
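
The reworked initFields walks reflect.Type instead of reflect.Value and carries the list of struct types already visited on the current path, so a self-referencing type (see RecurseiveType in the test below) can no longer cause endless recursion. A standalone sketch of that guard, independent of wmo:

package main

import (
	"fmt"
	"reflect"
)

type Node struct {
	Value int
	Next  *Node // self-reference: would recurse forever without a guard
}

// walk prints all field paths of t, stopping when a pointer target type was already seen on this path.
func walk(prefix string, t reflect.Type, seen []reflect.Type) {
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		fmt.Println(prefix + f.Name)

		if f.Type.Kind() == reflect.Struct {
			walk(prefix+f.Name+".", f.Type, seen)
		}

		if f.Type.Kind() == reflect.Pointer && f.Type.Elem().Kind() == reflect.Struct {
			inner := f.Type.Elem()
			already := false
			for _, s := range seen {
				if s == inner {
					already = true
				}
			}
			if !already {
				// copy the path and add the enclosing type before descending through the pointer
				walk(prefix+f.Name+".", inner, append(append([]reflect.Type{}, seen...), t))
			}
		}
	}
}

func main() {
	walk("", reflect.TypeOf(Node{}), nil) // prints: Value, Next, Next.Value, Next.Next - then stops
}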


@@ -23,6 +23,9 @@ func TestReflectionGetFieldType(t *testing.T) {
Sub struct {
A string `bson:"a"`
} `bson:"sub"`
SubPtr *struct {
A string `bson:"a"`
} `bson:"subPtr"`
Str string `bson:"str"`
Ptr *int `bson:"ptr"`
MDate rfctime.RFC3339NanoTime `bson:"mdate"`
@@ -43,6 +46,11 @@ func TestReflectionGetFieldType(t *testing.T) {
}{
A: "2",
},
SubPtr: &struct {
A string `bson:"a"`
}{
A: "4",
},
Str: "3", Str: "3",
Ptr: langext.Ptr(4), Ptr: langext.Ptr(4),
MDate: t1, MDate: t1,
@@ -82,6 +90,12 @@ func TestReflectionGetFieldType(t *testing.T) {
tst.AssertEqual(t, gft("sub.a").IsPointer, false) tst.AssertEqual(t, gft("sub.a").IsPointer, false)
tst.AssertEqual(t, gfv("sub.a").(string), "2") tst.AssertEqual(t, gfv("sub.a").(string), "2")
tst.AssertEqual(t, gft("subPtr.a").Kind.String(), "string")
tst.AssertEqual(t, gft("subPtr.a").Type.String(), "string")
tst.AssertEqual(t, gft("subPtr.a").Name, "A")
tst.AssertEqual(t, gft("subPtr.a").IsPointer, false)
tst.AssertEqual(t, gfv("subPtr.a").(string), "4")
tst.AssertEqual(t, gft("str").Kind.String(), "string") tst.AssertEqual(t, gft("str").Kind.String(), "string")
tst.AssertEqual(t, gft("str").Type.String(), "string") tst.AssertEqual(t, gft("str").Type.String(), "string")
tst.AssertEqual(t, gft("str").Name, "Str") tst.AssertEqual(t, gft("str").Name, "Str")
@@ -99,16 +113,25 @@ func TestReflectionGetTokenValueAsMongoType(t *testing.T) {
type IDType string
type RecurseiveType struct {
Other int `bson:"other"`
Inner *RecurseiveType `bson:"inner"`
}
type TestData struct {
ID IDType `bson:"_id"`
CDate time.Time `bson:"cdate"`
Sub struct {
A string `bson:"a"`
} `bson:"sub"`
SubPtr *struct {
A string `bson:"a"`
} `bson:"subPtr"`
Str string `bson:"str"`
Ptr *int `bson:"ptr"`
Num int `bson:"num"`
MDate rfctime.RFC3339NanoTime `bson:"mdate"`
Rec RecurseiveType `bson:"rec"`
}
coll := W[TestData](&mongo.Collection{})
@@ -130,6 +153,9 @@ func TestReflectionGetTokenValueAsMongoType(t *testing.T) {
}
tst.AssertEqual(t, gtvasmt("hello", "str").(string), "hello")
tst.AssertEqual(t, gtvasmt("hello", "sub.a").(string), "hello")
tst.AssertEqual(t, gtvasmt("hello", "subPtr.a").(string), "hello")
tst.AssertEqual(t, gtvasmt("4", "rec.other").(int), 4)
tst.AssertEqual(t, gtvasmt("4", "num").(int), 4) tst.AssertEqual(t, gtvasmt("4", "num").(int), 4)
tst.AssertEqual(t, gtvasmt("asdf", "_id").(IDType), "asdf") tst.AssertEqual(t, gtvasmt("asdf", "_id").(IDType), "asdf")
tst.AssertEqual(t, gtvasmt("", "ptr").(*int), nil) tst.AssertEqual(t, gtvasmt("", "ptr").(*int), nil)