Compare commits
3 Commits
v0.0.339
...
feature/mo
Author | SHA1 | Date | |
---|---|---|---|
9d88ab3a2b
|
|||
2ec88e81f3
|
|||
d471d7c396
|
@@ -1,55 +0,0 @@
|
||||
|
||||
# https://docs.gitea.com/next/usage/actions/quickstart
|
||||
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions
|
||||
# https://docs.github.com/en/actions/learn-github-actions/contexts#github-context
|
||||
|
||||
name: Build Docker and Deploy
|
||||
run-name: Build & Deploy ${{ gitea.ref }} on ${{ gitea.actor }}
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- '*'
|
||||
- '**'
|
||||
|
||||
|
||||
jobs:
|
||||
run_tests:
|
||||
name: Run goext test-suite
|
||||
runs-on: bfb-cicd-latest
|
||||
steps:
|
||||
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Setup go
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version-file: '${{ gitea.workspace }}/go.mod'
|
||||
|
||||
- name: Setup packages
|
||||
uses: awalsh128/cache-apt-pkgs-action@latest
|
||||
with:
|
||||
packages: curl python3
|
||||
version: 1.0
|
||||
|
||||
- name: go version
|
||||
run: go version
|
||||
|
||||
- name: Run tests
|
||||
run: cd "${{ gitea.workspace }}" && make test
|
||||
|
||||
- name: Send failure mail
|
||||
if: failure()
|
||||
uses: dawidd6/action-send-mail@v3
|
||||
with:
|
||||
server_address: smtp.fastmail.com
|
||||
server_port: 465
|
||||
secure: true
|
||||
username: ${{secrets.MAIL_USERNAME}}
|
||||
password: ${{secrets.MAIL_PASSWORD}}
|
||||
subject: Pipeline on '${{ gitea.repository }}' failed
|
||||
to: ${{ steps.commiter_info.outputs.MAIL }}
|
||||
from: Gitea Actions <gitea_actions@blackforestbytes.de>
|
||||
body: "Go to https://gogs.blackforestbytes.com/${{ gitea.repository }}/actions"
|
||||
|
6
.idea/golinter.xml
generated
6
.idea/golinter.xml
generated
@@ -1,6 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="GoLinterSettings">
|
||||
<option name="checkGoLinterExe" value="false" />
|
||||
</component>
|
||||
</project>
|
15
Makefile
15
Makefile
@@ -1,17 +1,16 @@
|
||||
|
||||
.PHONY: run test version update-mongo
|
||||
|
||||
run:
|
||||
echo "This is a library - can't be run" && false
|
||||
|
||||
test:
|
||||
# go test ./...
|
||||
which gotestsum || go install gotest.tools/gotestsum@latest
|
||||
gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./..."
|
||||
|
||||
test-in-docker:
|
||||
tag="goext_temp_test_image:$(shell uuidgen | tr -d '-')"; \
|
||||
docker build --tag $$tag . -f .gitea/workflows/Dockerfile_tests; \
|
||||
docker run --rm $$tag; \
|
||||
docker rmi $$tag
|
||||
gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./test"
|
||||
|
||||
version:
|
||||
_data/version.sh
|
||||
_data/version.sh
|
||||
|
||||
update-mongo:
|
||||
_data/update-mongo.sh
|
60
README.md
60
README.md
@@ -10,34 +10,32 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"`
|
||||
|
||||
### Packages:
|
||||
|
||||
| Name | Maintainer | Description |
|
||||
|-------------|------------|---------------------------------------------------------------------------------------------------------------|
|
||||
| langext | Mike | General uttility/helper functions, (everything thats missing from go standard library) |
|
||||
| mathext | Mike | Utility/Helper functions for math |
|
||||
| cryptext | Mike | Utility/Helper functions for encryption |
|
||||
| syncext | Mike | Utility/Helper funtions for multi-threading / mutex / channels |
|
||||
| dataext | Mike | Various useful data structures |
|
||||
| zipext | Mike | Utility for zip/gzip/tar etc |
|
||||
| reflectext | Mike | Utility for golang reflection |
|
||||
| fsext | Mike | Utility for filesytem access |
|
||||
| | | |
|
||||
| mongoext | Mike | Utility/Helper functions for mongodb |
|
||||
| cursortoken | Mike | MongoDB cursortoken implementation |
|
||||
| pagination | Mike | Pagination implementation |
|
||||
| | | |
|
||||
| totpext | Mike | Implementation of TOTP (2-Factor-Auth) |
|
||||
| termext | Mike | Utilities for terminals (mostly color output) |
|
||||
| confext | Mike | Parses environment configuration into structs |
|
||||
| cmdext | Mike | Runner for external commands/processes |
|
||||
| | | |
|
||||
| sq | Mike | Utility functions for sql based databases |
|
||||
| tst | Mike | Utility functions for unit tests |
|
||||
| | | |
|
||||
| rfctime | Mike | Classes for time seriallization, with different marshallign method for mongo and json |
|
||||
| gojson | Mike | Same interface for marshalling/unmarshalling as go/json, except with proper serialization of null arrays/maps |
|
||||
| | | |
|
||||
| bfcodegen | Mike | Various codegen tools (run via go generate) |
|
||||
| | | |
|
||||
| rext | Mike | Regex Wrapper, wraps regexp with a better interface |
|
||||
| wmo | Mike | Mongo Wrapper, wraps mongodb with a better interface |
|
||||
| | | |
|
||||
| Name | Maintainer | Description |
|
||||
|--------------|------------|---------------------------------------------------------------------------------------------------------------|
|
||||
| langext | Mike | General uttility/helper functions, (everything thats missing from go standard library) |
|
||||
| mathext | Mike | Utility/Helper functions for math |
|
||||
| cryptext | Mike | Utility/Helper functions for encryption |
|
||||
| syncext | Mike | Utility/Helper funtions for multi-threading / mutex / channels |
|
||||
| dataext | Mike | Various useful data structures |
|
||||
| zipext | Mike | Utility for zip/gzip/tar etc |
|
||||
| reflectext | Mike | Utility for golagn reflection |
|
||||
| | | |
|
||||
| mongoext | Mike | Utility/Helper functions for mongodb |
|
||||
| cursortoken | Mike | MongoDB cursortoken implementation |
|
||||
| | | |
|
||||
| totpext | Mike | Implementation of TOTP (2-Factor-Auth) |
|
||||
| termext | Mike | Utilities for terminals (mostly color output) |
|
||||
| confext | Mike | Parses environment configuration into structs |
|
||||
| cmdext | Mike | Runner for external commands/processes |
|
||||
| | | |
|
||||
| sq | Mike | Utility functions for sql based databases |
|
||||
| tst | Mike | Utility functions for unit tests |
|
||||
| | | |
|
||||
| rfctime | Mike | Classes for time seriallization, with different marshallign method for mongo and json |
|
||||
| gojson | Mike | Same interface for marshalling/unmarshalling as go/json, except with proper serialization of null arrays/maps |
|
||||
| | | |
|
||||
| bfcodegen | Mike | Various codegen tools (run via go generate) |
|
||||
| | | |
|
||||
| rext | Mike | Regex Wrapper, wraps regexp with a better interface |
|
||||
| wmo | Mike | Mongo Wrapper, wraps mongodb with a better interface |
|
||||
| | | |
|
12
TODO.md
12
TODO.md
@@ -2,6 +2,12 @@
|
||||
|
||||
- cronext
|
||||
|
||||
- rfctime.DateOnly
|
||||
- rfctime.HMSTimeOnly
|
||||
- rfctime.NanoTimeOnly
|
||||
- cursortoken
|
||||
|
||||
- typed/geenric mongo wrapper
|
||||
|
||||
- error package
|
||||
|
||||
- rfctime.DateOnly
|
||||
- rfctime.HMSTimeOnly
|
||||
- rfctime.NanoTimeOnly
|
80
_data/mongo.patch
Normal file
80
_data/mongo.patch
Normal file
@@ -0,0 +1,80 @@
|
||||
diff --git a/mongo/bson/bsoncodec/struct_codec.go b/mongo/bson/bsoncodec/struct_codec.go
|
||||
--- a/mongo/bson/bsoncodec/struct_codec.go
|
||||
+++ b/mongo/bson/bsoncodec/struct_codec.go
|
||||
@@ -122,6 +122,10 @@ func (sc *StructCodec) EncodeValue(r EncodeContext, vw bsonrw.ValueWriter, val r
|
||||
}
|
||||
var rv reflect.Value
|
||||
for _, desc := range sd.fl {
|
||||
+ if desc.omitAlways {
|
||||
+ continue
|
||||
+ }
|
||||
+
|
||||
if desc.inline == nil {
|
||||
rv = val.Field(desc.idx)
|
||||
} else {
|
||||
@@ -400,15 +404,16 @@ type structDescription struct {
|
||||
}
|
||||
|
||||
type fieldDescription struct {
|
||||
- name string // BSON key name
|
||||
- fieldName string // struct field name
|
||||
- idx int
|
||||
- omitEmpty bool
|
||||
- minSize bool
|
||||
- truncate bool
|
||||
- inline []int
|
||||
- encoder ValueEncoder
|
||||
- decoder ValueDecoder
|
||||
+ name string // BSON key name
|
||||
+ fieldName string // struct field name
|
||||
+ idx int
|
||||
+ omitEmpty bool
|
||||
+ omitAlways bool
|
||||
+ minSize bool
|
||||
+ truncate bool
|
||||
+ inline []int
|
||||
+ encoder ValueEncoder
|
||||
+ decoder ValueDecoder
|
||||
}
|
||||
|
||||
type byIndex []fieldDescription
|
||||
@@ -491,6 +496,7 @@ func (sc *StructCodec) describeStruct(r *Registry, t reflect.Type) (*structDescr
|
||||
}
|
||||
description.name = stags.Name
|
||||
description.omitEmpty = stags.OmitEmpty
|
||||
+ description.omitAlways = stags.OmitAlways
|
||||
description.minSize = stags.MinSize
|
||||
description.truncate = stags.Truncate
|
||||
|
||||
diff --git a/mongo/bson/bsoncodec/struct_tag_parser.go b/mongo/bson/bsoncodec/struct_tag_parser.go
|
||||
--- a/mongo/bson/bsoncodec/struct_tag_parser.go
|
||||
+++ b/mongo/bson/bsoncodec/struct_tag_parser.go
|
||||
@@ -52,12 +52,13 @@ func (stpf StructTagParserFunc) ParseStructTags(sf reflect.StructField) (StructT
|
||||
//
|
||||
// TODO(skriptble): Add tags for undefined as nil and for null as nil.
|
||||
type StructTags struct {
|
||||
- Name string
|
||||
- OmitEmpty bool
|
||||
- MinSize bool
|
||||
- Truncate bool
|
||||
- Inline bool
|
||||
- Skip bool
|
||||
+ Name string
|
||||
+ OmitEmpty bool
|
||||
+ OmitAlways bool
|
||||
+ MinSize bool
|
||||
+ Truncate bool
|
||||
+ Inline bool
|
||||
+ Skip bool
|
||||
}
|
||||
|
||||
// DefaultStructTagParser is the StructTagParser used by the StructCodec by default.
|
||||
@@ -108,6 +109,8 @@ func parseTags(key string, tag string) (StructTags, error) {
|
||||
switch str {
|
||||
case "omitempty":
|
||||
st.OmitEmpty = true
|
||||
+ case "omitalways":
|
||||
+ st.OmitAlways = true
|
||||
case "minsize":
|
||||
st.MinSize = true
|
||||
case "truncate":
|
95
_data/update-mongo.sh
Executable file
95
_data/update-mongo.sh
Executable file
@@ -0,0 +1,95 @@
|
||||
#!/bin/bash
|
||||
|
||||
|
||||
set -o nounset # disallow usage of unset vars ( set -u )
|
||||
set -o errexit # Exit immediately if a pipeline returns non-zero. ( set -e )
|
||||
set -o errtrace # Allow the above trap be inherited by all functions in the script. ( set -E )
|
||||
set -o pipefail # Return value of a pipeline is the value of the last (rightmost) command to exit with a non-zero status
|
||||
IFS=$'\n\t' # Set $IFS to only newline and tab.
|
||||
|
||||
|
||||
|
||||
dir="/tmp/mongo_repo_$( uuidgen )"
|
||||
|
||||
echo ""
|
||||
echo "> Clone https://github.dev/mongodb/mongo-go-driver"
|
||||
echo ""
|
||||
|
||||
git clone "https://github.com/mongodb/mongo-go-driver" "$dir"
|
||||
|
||||
pushd "$dir"
|
||||
|
||||
git fetch --tags
|
||||
|
||||
latestTag="$( git describe --tags `git rev-list --tags --max-count=1` )"
|
||||
|
||||
git -c "advice.detachedHead=false" checkout $latestTag
|
||||
|
||||
latestSHA="$( git rev-parse HEAD )"
|
||||
|
||||
popd
|
||||
|
||||
existingTag=$( cat mongoPatchVersion.go | grep -oP "(?<=const MongoCloneTag = \")([A-Za-z0-9.]+)(?=\")" )
|
||||
existingSHA=$( cat mongoPatchVersion.go | grep -oP "(?<=const MongoCloneCommit = \")([A-Za-z0-9.]+)(?=\")" )
|
||||
|
||||
echo "===================================="
|
||||
echo "ID (online) $latestSHA"
|
||||
echo "ID (local) $existingSHA"
|
||||
echo "Tag (online) $latestTag"
|
||||
echo "Tag (local) $existingTag"
|
||||
echo "===================================="
|
||||
|
||||
if [[ "$latestTag" == "$existingTag" ]]; then
|
||||
echo "Nothing to do"
|
||||
rm -rf "$dir"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "> Copy repository"
|
||||
echo ""
|
||||
|
||||
rm -rf mongo
|
||||
cp -r "$dir" "mongo"
|
||||
rm -rf "$dir"
|
||||
|
||||
echo ""
|
||||
echo "> Clean repository"
|
||||
echo ""
|
||||
|
||||
rm -rf "mongo/.git"
|
||||
rm -rf "mongo/.evergreen"
|
||||
rm -rf "mongo/cmd"
|
||||
rm -rf "mongo/docs"
|
||||
rm -rf "mongo/etc"
|
||||
rm -rf "mongo/examples"
|
||||
rm -rf "mongo/testdata"
|
||||
rm -rf "mongo/benchmark"
|
||||
rm -rf "mongo/vendor"
|
||||
rm -rf "mongo/internal/test"
|
||||
rm -rf "mongo/go.mod"
|
||||
rm -rf "mongo/go.sum"
|
||||
|
||||
echo ""
|
||||
echo "> Update mongoPatchVersion.go"
|
||||
echo ""
|
||||
|
||||
{
|
||||
|
||||
printf "package goext\n"
|
||||
printf "\n"
|
||||
printf "// %s\n" "$( date +"%Y-%m-%d %H:%M:%S%z" )"
|
||||
printf "\n"
|
||||
printf "const MongoCloneTag = \"%s\"\n" "$latestTag"
|
||||
printf "const MongoCloneCommit = \"%s\"\n" "$latestSHA"
|
||||
|
||||
} > mongoPatchVersion.go
|
||||
|
||||
echo ""
|
||||
echo "> Patch mongo"
|
||||
echo ""
|
||||
|
||||
git apply -v _data/mongo.patch
|
||||
|
||||
echo ""
|
||||
echo "Done."
|
@@ -21,11 +21,6 @@ if [ "$( git rev-parse --abbrev-ref HEAD )" != "master" ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo -n "Insert optional commit message: "
|
||||
read commitMessage
|
||||
echo ""
|
||||
|
||||
git pull --ff
|
||||
|
||||
go get -u ./...
|
||||
@@ -45,11 +40,6 @@ git add --verbose .
|
||||
|
||||
msg="v${next_ver}"
|
||||
|
||||
if [[ "$commitMessage" != "" ]]; then
|
||||
msg="${msg} ${commitMessage}"
|
||||
fi
|
||||
|
||||
|
||||
if [ $# -gt 0 ]; then
|
||||
msg="$1"
|
||||
fi
|
||||
|
Binary file not shown.
@@ -1,182 +0,0 @@
|
||||
package bfcodegen
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
_ "embed"
|
||||
"errors"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/rext"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
type CSIDDef struct {
|
||||
File string
|
||||
FileRelative string
|
||||
Name string
|
||||
Prefix string
|
||||
}
|
||||
|
||||
var rexCSIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`))
|
||||
|
||||
var rexCSIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@csid:type)\s+\[(?P<prefix>[A-Z0-9]{3})].*$`))
|
||||
|
||||
var rexCSIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumCharsetIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))
|
||||
|
||||
//go:embed csid-generate.template
|
||||
var templateCSIDGenerateText string
|
||||
|
||||
func GenerateCharsetIDSpecs(sourceDir string, destFile string) error {
|
||||
|
||||
files, err := os.ReadDir(sourceDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
oldChecksum := "N/A"
|
||||
if _, err := os.Stat(destFile); !os.IsNotExist(err) {
|
||||
content, err := os.ReadFile(destFile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if m, ok := rexCSIDChecksumConst.MatchFirst(string(content)); ok {
|
||||
oldChecksum = m.GroupByName("cs").Value()
|
||||
}
|
||||
}
|
||||
|
||||
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
|
||||
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
|
||||
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") })
|
||||
langext.SortBy(files, func(v os.DirEntry) string { return v.Name() })
|
||||
|
||||
newChecksumStr := goext.GoextVersion
|
||||
for _, f := range files {
|
||||
content, err := os.ReadFile(path.Join(sourceDir, f.Name()))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content)
|
||||
}
|
||||
|
||||
newChecksum := cryptext.BytesSha256([]byte(newChecksumStr))
|
||||
|
||||
if newChecksum != oldChecksum {
|
||||
fmt.Printf("[IDGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum)
|
||||
} else {
|
||||
fmt.Printf("[IDGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum)
|
||||
return nil
|
||||
}
|
||||
|
||||
allIDs := make([]CSIDDef, 0)
|
||||
|
||||
pkgname := ""
|
||||
|
||||
for _, f := range files {
|
||||
fmt.Printf("========= %s =========\n\n", f.Name())
|
||||
fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name()))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fmt.Printf("\n")
|
||||
|
||||
allIDs = append(allIDs, fileIDs...)
|
||||
|
||||
if pn != "" {
|
||||
pkgname = pn
|
||||
}
|
||||
}
|
||||
|
||||
if pkgname == "" {
|
||||
return errors.New("no package name found in any file")
|
||||
}
|
||||
|
||||
fdata, err := format.Source([]byte(fmtCSIDOutput(newChecksum, allIDs, pkgname)))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = os.WriteFile(destFile, fdata, 0o755)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) {
|
||||
file, err := os.Open(fn)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
defer func() { _ = file.Close() }()
|
||||
|
||||
bin, err := io.ReadAll(file)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
lines := strings.Split(string(bin), "\n")
|
||||
|
||||
ids := make([]CSIDDef, 0)
|
||||
|
||||
pkgname := ""
|
||||
|
||||
for i, line := range lines {
|
||||
if i == 0 && strings.HasPrefix(line, "// Code generated by") {
|
||||
break
|
||||
}
|
||||
|
||||
if match, ok := rexCSIDPackage.MatchFirst(line); i == 0 && ok {
|
||||
pkgname = match.GroupByName("name").Value()
|
||||
continue
|
||||
}
|
||||
|
||||
if match, ok := rexCSIDDef.MatchFirst(line); ok {
|
||||
|
||||
rfp, err := filepath.Rel(basedir, fn)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
def := CSIDDef{
|
||||
File: fn,
|
||||
FileRelative: rfp,
|
||||
Name: match.GroupByName("name").Value(),
|
||||
Prefix: match.GroupByName("prefix").Value(),
|
||||
}
|
||||
fmt.Printf("Found ID definition { '%s' }\n", def.Name)
|
||||
ids = append(ids, def)
|
||||
}
|
||||
}
|
||||
|
||||
return ids, pkgname, nil
|
||||
}
|
||||
|
||||
func fmtCSIDOutput(cs string, ids []CSIDDef, pkgname string) string {
|
||||
templ := template.Must(template.New("csid-generate").Parse(templateCSIDGenerateText))
|
||||
|
||||
buffer := bytes.Buffer{}
|
||||
|
||||
err := templ.Execute(&buffer, langext.H{
|
||||
"PkgName": pkgname,
|
||||
"Checksum": cs,
|
||||
"GoextVersion": goext.GoextVersion,
|
||||
"IDs": ids,
|
||||
})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return buffer.String()
|
||||
}
|
@@ -1,190 +0,0 @@
|
||||
// Code generated by csid-generate.go DO NOT EDIT.
|
||||
|
||||
package {{.PkgName}}
|
||||
|
||||
import "crypto/rand"
|
||||
import "crypto/sha256"
|
||||
import "fmt"
|
||||
import "github.com/go-playground/validator/v10"
|
||||
import "github.com/rs/zerolog/log"
|
||||
import "gogs.mikescher.com/BlackForestBytes/goext/exerr"
|
||||
import "gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
import "gogs.mikescher.com/BlackForestBytes/goext/rext"
|
||||
import "math/big"
|
||||
import "reflect"
|
||||
import "regexp"
|
||||
import "strings"
|
||||
|
||||
const ChecksumCharsetIDGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}}
|
||||
|
||||
const idlen = 24
|
||||
|
||||
const checklen = 1
|
||||
|
||||
const idCharset = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
|
||||
const idCharsetLen = len(idCharset)
|
||||
|
||||
var charSetReverseMap = generateCharsetMap()
|
||||
|
||||
const ({{range .IDs}}
|
||||
prefix{{.Name}} = "{{.Prefix}}" {{end}}
|
||||
)
|
||||
|
||||
var ({{range .IDs}}
|
||||
regex{{.Name}} = generateRegex(prefix{{.Name}}) {{end}}
|
||||
)
|
||||
|
||||
func generateRegex(prefix string) rext.Regex {
|
||||
return rext.W(regexp.MustCompile(fmt.Sprintf("^%s[%s]{%d}[%s]{%d}$", prefix, idCharset, idlen-len(prefix)-checklen, idCharset, checklen)))
|
||||
}
|
||||
|
||||
func generateCharsetMap() []int {
|
||||
result := make([]int, 128)
|
||||
for i := 0; i < len(result); i++ {
|
||||
result[i] = -1
|
||||
}
|
||||
for idx, chr := range idCharset {
|
||||
result[int(chr)] = idx
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func generateID(prefix string) string {
|
||||
k := ""
|
||||
csMax := big.NewInt(int64(idCharsetLen))
|
||||
checksum := 0
|
||||
for i := 0; i < idlen-len(prefix)-checklen; i++ {
|
||||
v, err := rand.Int(rand.Reader, csMax)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
v64 := v.Int64()
|
||||
k += string(idCharset[v64])
|
||||
checksum = (checksum + int(v64)) % (idCharsetLen)
|
||||
}
|
||||
checkstr := string(idCharset[checksum%idCharsetLen])
|
||||
return prefix + k + checkstr
|
||||
}
|
||||
|
||||
func generateIDFromSeed(prefix string, seed string) string {
|
||||
h := sha256.New()
|
||||
|
||||
iddata := ""
|
||||
for len(iddata) < idlen-len(prefix)-checklen {
|
||||
h.Write([]byte(seed))
|
||||
bs := h.Sum(nil)
|
||||
iddata += langext.NewAnyBaseConverter(idCharset).Encode(bs)
|
||||
}
|
||||
|
||||
checksum := 0
|
||||
for i := 0; i < idlen-len(prefix)-checklen; i++ {
|
||||
ichr := int(iddata[i])
|
||||
checksum = (checksum + charSetReverseMap[ichr]) % (idCharsetLen)
|
||||
}
|
||||
|
||||
checkstr := string(idCharset[checksum%idCharsetLen])
|
||||
|
||||
return prefix + iddata[:(idlen-len(prefix)-checklen)] + checkstr
|
||||
}
|
||||
|
||||
func validateID(prefix string, value string) error {
|
||||
if len(value) != idlen {
|
||||
return exerr.New(exerr.TypeInvalidCSID, "id has the wrong length").Str("value", value).Build()
|
||||
}
|
||||
|
||||
if !strings.HasPrefix(value, prefix) {
|
||||
return exerr.New(exerr.TypeInvalidCSID, "id is missing the correct prefix").Str("value", value).Str("prefix", prefix).Build()
|
||||
}
|
||||
|
||||
checksum := 0
|
||||
for i := len(prefix); i < len(value)-checklen; i++ {
|
||||
ichr := int(value[i])
|
||||
if ichr < 0 || ichr >= len(charSetReverseMap) || charSetReverseMap[ichr] == -1 {
|
||||
return exerr.New(exerr.TypeInvalidCSID, "id contains invalid characters").Str("value", value).Build()
|
||||
}
|
||||
checksum = (checksum + charSetReverseMap[ichr]) % (idCharsetLen)
|
||||
}
|
||||
|
||||
checkstr := string(idCharset[checksum%idCharsetLen])
|
||||
|
||||
if !strings.HasSuffix(value, checkstr) {
|
||||
return exerr.New(exerr.TypeInvalidCSID, "id checkstring is invalid").Str("value", value).Str("checkstr", checkstr).Build()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func getRawData(prefix string, value string) string {
|
||||
if len(value) != idlen {
|
||||
return ""
|
||||
}
|
||||
return value[len(prefix) : idlen-checklen]
|
||||
}
|
||||
|
||||
func getCheckString(prefix string, value string) string {
|
||||
if len(value) != idlen {
|
||||
return ""
|
||||
}
|
||||
return value[idlen-checklen:]
|
||||
}
|
||||
|
||||
func ValidateEntityID(vfl validator.FieldLevel) bool {
|
||||
if !vfl.Field().CanInterface() {
|
||||
log.Error().Msgf("Failed to validate EntityID (cannot interface ?!?)")
|
||||
return false
|
||||
}
|
||||
|
||||
ifvalue := vfl.Field().Interface()
|
||||
|
||||
if value1, ok := ifvalue.(EntityID); ok {
|
||||
|
||||
if vfl.Field().Type().Kind() == reflect.Pointer && langext.IsNil(value1) {
|
||||
return true
|
||||
}
|
||||
|
||||
if err := value1.Valid(); err != nil {
|
||||
log.Debug().Msgf("Failed to validate EntityID '%s' (%s)", value1.String(), err.Error())
|
||||
return false
|
||||
} else {
|
||||
return true
|
||||
}
|
||||
|
||||
} else {
|
||||
log.Error().Msgf("Failed to validate EntityID (wrong type: %T)", ifvalue)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
{{range .IDs}}
|
||||
|
||||
// ================================ {{.Name}} ({{.FileRelative}}) ================================
|
||||
|
||||
func New{{.Name}}() {{.Name}} {
|
||||
return {{.Name}}(generateID(prefix{{.Name}}))
|
||||
}
|
||||
|
||||
func (id {{.Name}}) Valid() error {
|
||||
return validateID(prefix{{.Name}}, string(id))
|
||||
}
|
||||
|
||||
func (i {{.Name}}) String() string {
|
||||
return string(i)
|
||||
}
|
||||
|
||||
func (i {{.Name}}) Prefix() string {
|
||||
return prefix{{.Name}}
|
||||
}
|
||||
|
||||
func (id {{.Name}}) Raw() string {
|
||||
return getRawData(prefix{{.Name}}, string(id))
|
||||
}
|
||||
|
||||
func (id {{.Name}}) CheckString() string {
|
||||
return getCheckString(prefix{{.Name}}, string(id))
|
||||
}
|
||||
|
||||
func (id {{.Name}}) Regex() rext.Regex {
|
||||
return regex{{.Name}}
|
||||
}
|
||||
|
||||
{{end}}
|
@@ -1,52 +0,0 @@
|
||||
package bfcodegen
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/tst"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
//go:embed _test_example.tgz
|
||||
var CSIDExampleModels []byte
|
||||
|
||||
func TestGenerateCSIDSpecs(t *testing.T) {
|
||||
|
||||
tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz")
|
||||
|
||||
tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())
|
||||
|
||||
err := os.WriteFile(tmpFile, CSIDExampleModels, 0o777)
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
t.Cleanup(func() { _ = os.Remove(tmpFile) })
|
||||
|
||||
err = os.Mkdir(tmpDir, 0o777)
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
t.Cleanup(func() { _ = os.RemoveAll(tmpFile) })
|
||||
|
||||
_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
fmt.Println()
|
||||
fmt.Println()
|
||||
fmt.Println()
|
||||
fmt.Println("=====================================================================================================")
|
||||
fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/csid_gen.go"))(t)))
|
||||
fmt.Println("=====================================================================================================")
|
||||
fmt.Println()
|
||||
fmt.Println()
|
||||
fmt.Println()
|
||||
}
|
@@ -1,12 +1,10 @@
|
||||
package bfcodegen
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
_ "embed"
|
||||
"errors"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/rext"
|
||||
@@ -16,7 +14,7 @@ import (
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
"text/template"
|
||||
"time"
|
||||
)
|
||||
|
||||
type EnumDefVal struct {
|
||||
@@ -33,16 +31,13 @@ type EnumDef struct {
|
||||
Values []EnumDefVal
|
||||
}
|
||||
|
||||
var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`))
|
||||
var rexPackage = rext.W(regexp.MustCompile("^package\\s+(?P<name>[A-Za-z0-9_]+)\\s*$"))
|
||||
|
||||
var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`))
|
||||
var rexEnumDef = rext.W(regexp.MustCompile("^\\s*type\\s+(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*//\\s*(@enum:type).*$"))
|
||||
|
||||
var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-.]+"|[0-9]+))\s*(//(?P<descr>.*))?.*$`))
|
||||
var rexValueDef = rext.W(regexp.MustCompile("^\\s*(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*=\\s*(?P<value>(\"[A-Za-z0-9_:]+\"|[0-9]+))\\s*(//(?P<descr>.*))?.*$"))
|
||||
|
||||
var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))
|
||||
|
||||
//go:embed enum-generate.template
|
||||
var templateEnumGenerateText string
|
||||
var rexChecksumConst = rext.W(regexp.MustCompile("const ChecksumGenerator = \"(?P<cs>[A-Za-z0-9_]*)\""))
|
||||
|
||||
func GenerateEnumSpecs(sourceDir string, destFile string) error {
|
||||
|
||||
@@ -57,14 +52,13 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if m, ok := rexEnumChecksumConst.MatchFirst(string(content)); ok {
|
||||
if m, ok := rexChecksumConst.MatchFirst(string(content)); ok {
|
||||
oldChecksum = m.GroupByName("cs").Value()
|
||||
}
|
||||
}
|
||||
|
||||
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
|
||||
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
|
||||
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") })
|
||||
langext.SortBy(files, func(v os.DirEntry) string { return v.Name() })
|
||||
|
||||
newChecksumStr := goext.GoextVersion
|
||||
@@ -91,7 +85,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
|
||||
|
||||
for _, f := range files {
|
||||
fmt.Printf("========= %s =========\n\n", f.Name())
|
||||
fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name()))
|
||||
fileEnums, pn, err := processFile(sourceDir, path.Join(sourceDir, f.Name()))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -109,20 +103,29 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
|
||||
return errors.New("no package name found in any file")
|
||||
}
|
||||
|
||||
fdata, err := format.Source([]byte(fmtEnumOutput(newChecksum, allEnums, pkgname)))
|
||||
err = os.WriteFile(destFile, []byte(fmtOutput(newChecksum, allEnums, pkgname)), 0o755)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = os.WriteFile(destFile, fdata, 0o755)
|
||||
res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if res.CommandTimedOut {
|
||||
fmt.Println(res.StdCombined)
|
||||
return errors.New("go fmt timed out")
|
||||
}
|
||||
if res.ExitCode != 0 {
|
||||
fmt.Println(res.StdCombined)
|
||||
return errors.New("go fmt did not succeed")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) {
|
||||
func processFile(basedir string, fn string) ([]EnumDef, string, error) {
|
||||
file, err := os.Open(fn)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
@@ -146,7 +149,7 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) {
|
||||
break
|
||||
}
|
||||
|
||||
if match, ok := rexEnumPackage.MatchFirst(line); i == 0 && ok {
|
||||
if match, ok := rexPackage.MatchFirst(line); i == 0 && ok {
|
||||
pkgname = match.GroupByName("name").Value()
|
||||
continue
|
||||
}
|
||||
@@ -169,7 +172,7 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) {
|
||||
fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type)
|
||||
}
|
||||
|
||||
if match, ok := rexEnumValueDef.MatchFirst(line); ok {
|
||||
if match, ok := rexValueDef.MatchFirst(line); ok {
|
||||
typename := match.GroupByName("type").Value()
|
||||
def := EnumDefVal{
|
||||
VarName: match.GroupByName("name").Value(),
|
||||
@@ -199,33 +202,163 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) {
|
||||
return enums, pkgname, nil
|
||||
}
|
||||
|
||||
func fmtEnumOutput(cs string, enums []EnumDef, pkgname string) string {
|
||||
func fmtOutput(cs string, enums []EnumDef, pkgname string) string {
|
||||
str := "// Code generated by enum-generate.go DO NOT EDIT.\n"
|
||||
str += "\n"
|
||||
str += "package " + pkgname + "\n"
|
||||
str += "\n"
|
||||
|
||||
templ := template.New("enum-generate")
|
||||
str += "import \"gogs.mikescher.com/BlackForestBytes/goext/langext\"" + "\n"
|
||||
str += "\n"
|
||||
|
||||
templ = templ.Funcs(template.FuncMap{
|
||||
"boolToStr": func(b bool) string { return langext.Conditional(b, "true", "false") },
|
||||
"deref": func(v *string) string { return *v },
|
||||
"trimSpace": func(str string) string { return strings.TrimSpace(str) },
|
||||
"hasStr": func(v EnumDef) bool { return v.Type == "string" },
|
||||
"hasDescr": func(v EnumDef) bool {
|
||||
return langext.ArrAll(v.Values, func(val EnumDefVal) bool { return val.Description != nil })
|
||||
},
|
||||
})
|
||||
str += "const ChecksumGenerator = \"" + cs + "\"" + "\n"
|
||||
str += "\n"
|
||||
|
||||
templ = template.Must(templ.Parse(templateEnumGenerateText))
|
||||
str += "type Enum interface {" + "\n"
|
||||
str += " Valid() bool" + "\n"
|
||||
str += " ValuesAny() []any" + "\n"
|
||||
str += " ValuesMeta() []EnumMetaValue" + "\n"
|
||||
str += " VarName() string" + "\n"
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
|
||||
buffer := bytes.Buffer{}
|
||||
str += "type StringEnum interface {" + "\n"
|
||||
str += " Enum" + "\n"
|
||||
str += " String() string" + "\n"
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
|
||||
str += "type DescriptionEnum interface {" + "\n"
|
||||
str += " Enum" + "\n"
|
||||
str += " Description() string" + "\n"
|
||||
str += "}" + "\n"
|
||||
str += "\n"
|
||||
|
||||
str += "type EnumMetaValue struct {" + "\n"
|
||||
str += " VarName string `json:\"varName\"`" + "\n"
|
||||
str += " Value any `json:\"value\"`" + "\n"
|
||||
str += " Description *string `json:\"description\"`" + "\n"
|
||||
str += "}" + "\n"
|
||||
str += "\n"
|
||||
|
||||
for _, enumdef := range enums {
|
||||
|
||||
hasDescr := langext.ArrAll(enumdef.Values, func(val EnumDefVal) bool { return val.Description != nil })
|
||||
hasStr := enumdef.Type == "string"
|
||||
|
||||
str += "// ================================ " + enumdef.EnumTypeName + " ================================" + "\n"
|
||||
str += "//" + "\n"
|
||||
str += "// File: " + enumdef.FileRelative + "\n"
|
||||
str += "// StringEnum: " + langext.Conditional(hasStr, "true", "false") + "\n"
|
||||
str += "// DescrEnum: " + langext.Conditional(hasDescr, "true", "false") + "\n"
|
||||
str += "//" + "\n"
|
||||
str += "" + "\n"
|
||||
|
||||
str += "var __" + enumdef.EnumTypeName + "Values = []" + enumdef.EnumTypeName + "{" + "\n"
|
||||
for _, v := range enumdef.Values {
|
||||
str += " " + v.VarName + "," + "\n"
|
||||
}
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
|
||||
if hasDescr {
|
||||
str += "var __" + enumdef.EnumTypeName + "Descriptions = map[" + enumdef.EnumTypeName + "]string{" + "\n"
|
||||
for _, v := range enumdef.Values {
|
||||
str += " " + v.VarName + ": \"" + strings.TrimSpace(*v.Description) + "\"," + "\n"
|
||||
}
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
}
|
||||
|
||||
str += "var __" + enumdef.EnumTypeName + "Varnames = map[" + enumdef.EnumTypeName + "]string{" + "\n"
|
||||
for _, v := range enumdef.Values {
|
||||
str += " " + v.VarName + ": \"" + v.VarName + "\"," + "\n"
|
||||
}
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
|
||||
str += "func (e " + enumdef.EnumTypeName + ") Valid() bool {" + "\n"
|
||||
str += " return langext.InArray(e, __" + enumdef.EnumTypeName + "Values)" + "\n"
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
|
||||
str += "func (e " + enumdef.EnumTypeName + ") Values() []" + enumdef.EnumTypeName + " {" + "\n"
|
||||
str += " return __" + enumdef.EnumTypeName + "Values" + "\n"
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
|
||||
str += "func (e " + enumdef.EnumTypeName + ") ValuesAny() []any {" + "\n"
|
||||
str += " return langext.ArrCastToAny(__" + enumdef.EnumTypeName + "Values)" + "\n"
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
|
||||
str += "func (e " + enumdef.EnumTypeName + ") ValuesMeta() []EnumMetaValue {" + "\n"
|
||||
str += " return []EnumMetaValue{" + "\n"
|
||||
for _, v := range enumdef.Values {
|
||||
if hasDescr {
|
||||
str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: langext.Ptr(\"%s\")},", v.VarName, v.VarName, strings.TrimSpace(*v.Description)) + "\n"
|
||||
} else {
|
||||
str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: nil},", v.VarName, v.VarName) + "\n"
|
||||
}
|
||||
}
|
||||
str += " }" + "\n"
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
|
||||
if hasStr {
|
||||
str += "func (e " + enumdef.EnumTypeName + ") String() string {" + "\n"
|
||||
str += " return string(e)" + "\n"
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
}
|
||||
|
||||
if hasDescr {
|
||||
str += "func (e " + enumdef.EnumTypeName + ") Description() string {" + "\n"
|
||||
str += " if d, ok := __" + enumdef.EnumTypeName + "Descriptions[e]; ok {" + "\n"
|
||||
str += " return d" + "\n"
|
||||
str += " }" + "\n"
|
||||
str += " return \"\"" + "\n"
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
}
|
||||
|
||||
str += "func (e " + enumdef.EnumTypeName + ") VarName() string {" + "\n"
|
||||
str += " if d, ok := __" + enumdef.EnumTypeName + "Varnames[e]; ok {" + "\n"
|
||||
str += " return d" + "\n"
|
||||
str += " }" + "\n"
|
||||
str += " return \"\"" + "\n"
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
|
||||
str += "func Parse" + enumdef.EnumTypeName + "(vv string) (" + enumdef.EnumTypeName + ", bool) {" + "\n"
|
||||
str += " for _, ev := range __" + enumdef.EnumTypeName + "Values {" + "\n"
|
||||
str += " if string(ev) == vv {" + "\n"
|
||||
str += " return ev, true" + "\n"
|
||||
str += " }" + "\n"
|
||||
str += " }" + "\n"
|
||||
str += " return \"\", false" + "\n"
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
|
||||
str += "func " + enumdef.EnumTypeName + "Values() []" + enumdef.EnumTypeName + " {" + "\n"
|
||||
str += " return __" + enumdef.EnumTypeName + "Values" + "\n"
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
|
||||
str += "func " + enumdef.EnumTypeName + "ValuesMeta() []EnumMetaValue {" + "\n"
|
||||
str += " return []EnumMetaValue{" + "\n"
|
||||
for _, v := range enumdef.Values {
|
||||
if hasDescr {
|
||||
str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: langext.Ptr(\"%s\")},", v.VarName, v.VarName, strings.TrimSpace(*v.Description)) + "\n"
|
||||
} else {
|
||||
str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: nil},", v.VarName, v.VarName) + "\n"
|
||||
}
|
||||
}
|
||||
str += " }" + "\n"
|
||||
str += "}" + "\n"
|
||||
str += "" + "\n"
|
||||
|
||||
err := templ.Execute(&buffer, langext.H{
|
||||
"PkgName": pkgname,
|
||||
"Checksum": cs,
|
||||
"GoextVersion": goext.GoextVersion,
|
||||
"Enums": enums,
|
||||
})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return buffer.String()
|
||||
return str
|
||||
}
|
||||
|
@@ -1,111 +0,0 @@
|
||||
// Code generated by enum-generate.go DO NOT EDIT.
|
||||
|
||||
package {{.PkgName}}
|
||||
|
||||
import "gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
import "gogs.mikescher.com/BlackForestBytes/goext/enums"
|
||||
|
||||
const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}}
|
||||
|
||||
{{range .Enums}}
|
||||
|
||||
{{ $hasStr := ( . | hasStr ) }}
|
||||
{{ $hasDescr := ( . | hasDescr ) }}
|
||||
|
||||
// ================================ {{.EnumTypeName}} ================================
|
||||
//
|
||||
// File: {{.FileRelative}}
|
||||
// StringEnum: {{$hasStr | boolToStr}}
|
||||
// DescrEnum: {{$hasDescr | boolToStr}}
|
||||
//
|
||||
|
||||
var __{{.EnumTypeName}}Values = []{{.EnumTypeName}}{ {{range .Values}}
|
||||
{{.VarName}}, {{end}}
|
||||
}
|
||||
|
||||
{{if $hasDescr}}
|
||||
var __{{.EnumTypeName}}Descriptions = map[{{.EnumTypeName}}]string{ {{range .Values}}
|
||||
{{.VarName}}: "{{.Description | deref | trimSpace}}", {{end}}
|
||||
}
|
||||
{{end}}
|
||||
|
||||
var __{{.EnumTypeName}}Varnames = map[{{.EnumTypeName}}]string{ {{range .Values}}
|
||||
{{.VarName}}: "{{.VarName}}", {{end}}
|
||||
}
|
||||
|
||||
func (e {{.EnumTypeName}}) Valid() bool {
|
||||
return langext.InArray(e, __{{.EnumTypeName}}Values)
|
||||
}
|
||||
|
||||
func (e {{.EnumTypeName}}) Values() []{{.EnumTypeName}} {
|
||||
return __{{.EnumTypeName}}Values
|
||||
}
|
||||
|
||||
func (e {{.EnumTypeName}}) ValuesAny() []any {
|
||||
return langext.ArrCastToAny(__{{.EnumTypeName}}Values)
|
||||
}
|
||||
|
||||
func (e {{.EnumTypeName}}) ValuesMeta() []enums.EnumMetaValue {
|
||||
return {{.EnumTypeName}}ValuesMeta()
|
||||
}
|
||||
|
||||
{{if $hasStr}}
|
||||
func (e {{.EnumTypeName}}) String() string {
|
||||
return string(e)
|
||||
}
|
||||
{{end}}
|
||||
|
||||
{{if $hasDescr}}
|
||||
func (e {{.EnumTypeName}}) Description() string {
|
||||
if d, ok := __{{.EnumTypeName}}Descriptions[e]; ok {
|
||||
return d
|
||||
}
|
||||
return ""
|
||||
}
|
||||
{{end}}
|
||||
|
||||
func (e {{.EnumTypeName}}) VarName() string {
|
||||
if d, ok := __{{.EnumTypeName}}Varnames[e]; ok {
|
||||
return d
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue {
|
||||
{{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}}
|
||||
}
|
||||
|
||||
{{if $hasDescr}}
|
||||
func (e {{.EnumTypeName}}) DescriptionMeta() enums.EnumDescriptionMetaValue {
|
||||
return enums.EnumDescriptionMetaValue{VarName: e.VarName(), Value: e, Description: e.Description()}
|
||||
}
|
||||
{{end}}
|
||||
|
||||
func Parse{{.EnumTypeName}}(vv string) ({{.EnumTypeName}}, bool) {
|
||||
for _, ev := range __{{.EnumTypeName}}Values {
|
||||
if string(ev) == vv {
|
||||
return ev, true
|
||||
}
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
|
||||
func {{.EnumTypeName}}Values() []{{.EnumTypeName}} {
|
||||
return __{{.EnumTypeName}}Values
|
||||
}
|
||||
|
||||
func {{.EnumTypeName}}ValuesMeta() []enums.EnumMetaValue {
|
||||
return []enums.EnumMetaValue{ {{range .Values}}
|
||||
{{.VarName}}.Meta(), {{end}}
|
||||
}
|
||||
}
|
||||
|
||||
{{if $hasDescr}}
|
||||
func {{.EnumTypeName}}ValuesDescriptionMeta() []enums.EnumDescriptionMetaValue {
|
||||
return []enums.EnumDescriptionMetaValue{ {{range .Values}}
|
||||
{{.VarName}}.DescriptionMeta(), {{end}}
|
||||
}
|
||||
}
|
||||
{{end}}
|
||||
|
||||
{{end}}
|
@@ -1,52 +1,15 @@
|
||||
package bfcodegen
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/tst"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
//go:embed _test_example.tgz
|
||||
var EnumExampleModels []byte
|
||||
func TestApplyEnvOverridesSimple(t *testing.T) {
|
||||
|
||||
func TestGenerateEnumSpecs(t *testing.T) {
|
||||
err := GenerateEnumSpecs("/home/mike/Code/reiff/badennet/bnet-backend/models", "/home/mike/Code/reiff/badennet/bnet-backend/models/enums_gen.go")
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
t.Fail()
|
||||
}
|
||||
|
||||
tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz")
|
||||
|
||||
tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())
|
||||
|
||||
err := os.WriteFile(tmpFile, EnumExampleModels, 0o777)
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
t.Cleanup(func() { _ = os.Remove(tmpFile) })
|
||||
|
||||
err = os.Mkdir(tmpDir, 0o777)
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
t.Cleanup(func() { _ = os.RemoveAll(tmpFile) })
|
||||
|
||||
_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
err = GenerateEnumSpecs(tmpDir, tmpDir+"/enums_gen.go")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
err = GenerateEnumSpecs(tmpDir, tmpDir+"/enums_gen.go")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
fmt.Println()
|
||||
fmt.Println()
|
||||
fmt.Println()
|
||||
fmt.Println("=====================================================================================================")
|
||||
fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/enums_gen.go"))(t)))
|
||||
fmt.Println("=====================================================================================================")
|
||||
fmt.Println()
|
||||
fmt.Println()
|
||||
fmt.Println()
|
||||
}
|
||||
|
@@ -1,183 +0,0 @@
|
||||
package bfcodegen
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
_ "embed"
|
||||
"errors"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/rext"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
type IDDef struct {
|
||||
File string
|
||||
FileRelative string
|
||||
Name string
|
||||
}
|
||||
|
||||
var rexIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`))
|
||||
|
||||
var rexIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@id:type).*$`))
|
||||
|
||||
var rexIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))
|
||||
|
||||
//go:embed id-generate.template
|
||||
var templateIDGenerateText string
|
||||
|
||||
func GenerateIDSpecs(sourceDir string, destFile string) error {
|
||||
|
||||
files, err := os.ReadDir(sourceDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
oldChecksum := "N/A"
|
||||
if _, err := os.Stat(destFile); !os.IsNotExist(err) {
|
||||
content, err := os.ReadFile(destFile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if m, ok := rexIDChecksumConst.MatchFirst(string(content)); ok {
|
||||
oldChecksum = m.GroupByName("cs").Value()
|
||||
}
|
||||
}
|
||||
|
||||
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
|
||||
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
|
||||
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") })
|
||||
langext.SortBy(files, func(v os.DirEntry) string { return v.Name() })
|
||||
|
||||
newChecksumStr := goext.GoextVersion
|
||||
for _, f := range files {
|
||||
content, err := os.ReadFile(path.Join(sourceDir, f.Name()))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content)
|
||||
}
|
||||
|
||||
newChecksum := cryptext.BytesSha256([]byte(newChecksumStr))
|
||||
|
||||
if newChecksum != oldChecksum {
|
||||
fmt.Printf("[IDGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum)
|
||||
} else {
|
||||
fmt.Printf("[IDGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum)
|
||||
return nil
|
||||
}
|
||||
|
||||
allIDs := make([]IDDef, 0)
|
||||
|
||||
pkgname := ""
|
||||
|
||||
for _, f := range files {
|
||||
fmt.Printf("========= %s =========\n\n", f.Name())
|
||||
fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name()))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fmt.Printf("\n")
|
||||
|
||||
allIDs = append(allIDs, fileIDs...)
|
||||
|
||||
if pn != "" {
|
||||
pkgname = pn
|
||||
}
|
||||
}
|
||||
|
||||
if pkgname == "" {
|
||||
return errors.New("no package name found in any file")
|
||||
}
|
||||
|
||||
fdata, err := format.Source([]byte(fmtIDOutput(newChecksum, allIDs, pkgname)))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = os.WriteFile(destFile, fdata, 0o755)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func processIDFile(basedir string, fn string) ([]IDDef, string, error) {
|
||||
file, err := os.Open(fn)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
defer func() { _ = file.Close() }()
|
||||
|
||||
bin, err := io.ReadAll(file)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
lines := strings.Split(string(bin), "\n")
|
||||
|
||||
ids := make([]IDDef, 0)
|
||||
|
||||
pkgname := ""
|
||||
|
||||
for i, line := range lines {
|
||||
if i == 0 && strings.HasPrefix(line, "// Code generated by") {
|
||||
break
|
||||
}
|
||||
|
||||
if match, ok := rexIDPackage.MatchFirst(line); i == 0 && ok {
|
||||
pkgname = match.GroupByName("name").Value()
|
||||
continue
|
||||
}
|
||||
|
||||
if match, ok := rexIDDef.MatchFirst(line); ok {
|
||||
|
||||
rfp, err := filepath.Rel(basedir, fn)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
def := IDDef{
|
||||
File: fn,
|
||||
FileRelative: rfp,
|
||||
Name: match.GroupByName("name").Value(),
|
||||
}
|
||||
fmt.Printf("Found ID definition { '%s' }\n", def.Name)
|
||||
ids = append(ids, def)
|
||||
}
|
||||
}
|
||||
|
||||
return ids, pkgname, nil
|
||||
}
|
||||
|
||||
func fmtIDOutput(cs string, ids []IDDef, pkgname string) string {
|
||||
templ := template.Must(template.New("id-generate").Parse(templateIDGenerateText))
|
||||
|
||||
buffer := bytes.Buffer{}
|
||||
|
||||
anyDef := langext.ArrFirstOrNil(ids, func(def IDDef) bool { return def.Name == "AnyID" || def.Name == "AnyId" })
|
||||
|
||||
err := templ.Execute(&buffer, langext.H{
|
||||
"PkgName": pkgname,
|
||||
"Checksum": cs,
|
||||
"GoextVersion": goext.GoextVersion,
|
||||
"IDs": ids,
|
||||
"AnyDef": anyDef,
|
||||
})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return buffer.String()
|
||||
}
|
@@ -1,47 +0,0 @@
|
||||
// Code generated by id-generate.go DO NOT EDIT.
|
||||
|
||||
package {{.PkgName}}
|
||||
|
||||
import "go.mongodb.org/mongo-driver/bson"
|
||||
import "go.mongodb.org/mongo-driver/bson/bsontype"
|
||||
import "go.mongodb.org/mongo-driver/bson/primitive"
|
||||
import "gogs.mikescher.com/BlackForestBytes/goext/exerr"
|
||||
|
||||
const ChecksumIDGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}}
|
||||
|
||||
{{range .IDs}}
|
||||
|
||||
// ================================ {{.Name}} ({{.FileRelative}}) ================================
|
||||
|
||||
func (i {{.Name}}) MarshalBSONValue() (bsontype.Type, []byte, error) {
|
||||
if objId, err := primitive.ObjectIDFromHex(string(i)); err == nil {
|
||||
return bson.MarshalValue(objId)
|
||||
} else {
|
||||
return 0, nil, exerr.New(exerr.TypeMarshalEntityID, "Failed to marshal {{.Name}}("+i.String()+") to ObjectId").Str("value", string(i)).Type("type", i).Build()
|
||||
}
|
||||
}
|
||||
|
||||
func (i {{.Name}}) String() string {
|
||||
return string(i)
|
||||
}
|
||||
|
||||
func (i {{.Name}}) ObjID() (primitive.ObjectID, error) {
|
||||
return primitive.ObjectIDFromHex(string(i))
|
||||
}
|
||||
|
||||
func (i {{.Name}}) Valid() bool {
|
||||
_, err := primitive.ObjectIDFromHex(string(i))
|
||||
return err == nil
|
||||
}
|
||||
|
||||
{{if ne $.AnyDef nil}}
|
||||
func (i {{.Name}}) AsAny() {{$.AnyDef.Name}} {
|
||||
return {{$.AnyDef.Name}}(i)
|
||||
}
|
||||
{{end}}
|
||||
|
||||
func New{{.Name}}() {{.Name}} {
|
||||
return {{.Name}}(primitive.NewObjectID().Hex())
|
||||
}
|
||||
|
||||
{{end}}
|
@@ -1,52 +0,0 @@
|
||||
package bfcodegen
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/tst"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
//go:embed _test_example.tgz
|
||||
var IDExampleModels []byte
|
||||
|
||||
func TestGenerateIDSpecs(t *testing.T) {
|
||||
|
||||
tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz")
|
||||
|
||||
tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())
|
||||
|
||||
err := os.WriteFile(tmpFile, IDExampleModels, 0o777)
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
t.Cleanup(func() { _ = os.Remove(tmpFile) })
|
||||
|
||||
err = os.Mkdir(tmpDir, 0o777)
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
t.Cleanup(func() { _ = os.RemoveAll(tmpFile) })
|
||||
|
||||
_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
fmt.Println()
|
||||
fmt.Println()
|
||||
fmt.Println()
|
||||
fmt.Println("=====================================================================================================")
|
||||
fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/id_gen.go"))(t)))
|
||||
fmt.Println("=====================================================================================================")
|
||||
fmt.Println()
|
||||
fmt.Println()
|
||||
fmt.Println()
|
||||
}
|
@@ -14,7 +14,6 @@ type CommandRunner struct {
|
||||
listener []CommandListener
|
||||
enforceExitCodes *[]int
|
||||
enforceNoTimeout bool
|
||||
enforceNoStderr bool
|
||||
}
|
||||
|
||||
func Runner(program string) *CommandRunner {
|
||||
@@ -26,7 +25,6 @@ func Runner(program string) *CommandRunner {
|
||||
listener: make([]CommandListener, 0),
|
||||
enforceExitCodes: nil,
|
||||
enforceNoTimeout: false,
|
||||
enforceNoStderr: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -75,11 +73,6 @@ func (r *CommandRunner) FailOnTimeout() *CommandRunner {
|
||||
return r
|
||||
}
|
||||
|
||||
func (r *CommandRunner) FailOnStderr() *CommandRunner {
|
||||
r.enforceNoStderr = true
|
||||
return r
|
||||
}
|
||||
|
||||
func (r *CommandRunner) Listen(lstr CommandListener) *CommandRunner {
|
||||
r.listener = append(r.listener, lstr)
|
||||
return r
|
||||
|
@@ -11,7 +11,6 @@ import (
|
||||
|
||||
var ErrExitCode = errors.New("process exited with an unexpected exitcode")
|
||||
var ErrTimeout = errors.New("process did not exit after the specified timeout")
|
||||
var ErrStderrPrint = errors.New("process did print to stderr stream")
|
||||
|
||||
type CommandResult struct {
|
||||
StdOut string
|
||||
@@ -54,27 +53,12 @@ func run(opt CommandRunner) (CommandResult, error) {
|
||||
err error
|
||||
}
|
||||
|
||||
stderrFailChan := make(chan bool)
|
||||
|
||||
outputChan := make(chan resultObj)
|
||||
go func() {
|
||||
// we need to first fully read the pipes and then call Wait
|
||||
// see https://pkg.go.dev/os/exec#Cmd.StdoutPipe
|
||||
|
||||
listener := make([]CommandListener, 0)
|
||||
listener = append(listener, opt.listener...)
|
||||
|
||||
if opt.enforceNoStderr {
|
||||
listener = append(listener, genericCommandListener{
|
||||
_readRawStderr: langext.Ptr(func(v []byte) {
|
||||
if len(v) > 0 {
|
||||
stderrFailChan <- true
|
||||
}
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
stdout, stderr, stdcombined, err := preader.Read(listener)
|
||||
stdout, stderr, stdcombined, err := preader.Read(opt.listener)
|
||||
if err != nil {
|
||||
outputChan <- resultObj{stdout, stderr, stdcombined, err}
|
||||
_ = cmd.Process.Kill()
|
||||
@@ -131,34 +115,8 @@ func run(opt CommandRunner) (CommandResult, error) {
|
||||
return res, nil
|
||||
}
|
||||
|
||||
case <-stderrFailChan:
|
||||
_ = cmd.Process.Kill()
|
||||
|
||||
if fallback, ok := syncext.ReadChannelWithTimeout(outputChan, 32*time.Millisecond); ok {
|
||||
// most of the time the cmd.Process.Kill() should also have finished the pipereader
|
||||
// and we can at least return the already collected stdout, stderr, etc
|
||||
res := CommandResult{
|
||||
StdOut: fallback.stdout,
|
||||
StdErr: fallback.stderr,
|
||||
StdCombined: fallback.stdcombined,
|
||||
ExitCode: -1,
|
||||
CommandTimedOut: false,
|
||||
}
|
||||
return res, ErrStderrPrint
|
||||
} else {
|
||||
res := CommandResult{
|
||||
StdOut: "",
|
||||
StdErr: "",
|
||||
StdCombined: "",
|
||||
ExitCode: -1,
|
||||
CommandTimedOut: false,
|
||||
}
|
||||
return res, ErrStderrPrint
|
||||
}
|
||||
|
||||
case outobj := <-outputChan:
|
||||
var exiterr *exec.ExitError
|
||||
if errors.As(outobj.err, &exiterr) {
|
||||
if exiterr, ok := outobj.err.(*exec.ExitError); ok {
|
||||
excode := exiterr.ExitCode()
|
||||
for _, lstr := range opt.listener {
|
||||
lstr.Finished(excode)
|
||||
|
@@ -1,7 +1,6 @@
|
||||
package cmdext
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"testing"
|
||||
"time"
|
||||
@@ -33,7 +32,7 @@ func TestStdout(t *testing.T) {
|
||||
|
||||
func TestStderr(t *testing.T) {
|
||||
|
||||
res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").Run()
|
||||
res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").Run()
|
||||
if err != nil {
|
||||
t.Errorf("%v", err)
|
||||
}
|
||||
@@ -56,7 +55,7 @@ func TestStderr(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestStdcombined(t *testing.T) {
|
||||
res1, err := Runner("python3").
|
||||
res1, err := Runner("python").
|
||||
Arg("-c").
|
||||
Arg("import sys; import time; print(\"1\", file=sys.stderr, flush=True); time.sleep(0.1); print(\"2\", file=sys.stdout, flush=True); time.sleep(0.1); print(\"3\", file=sys.stderr, flush=True)").
|
||||
Run()
|
||||
@@ -82,7 +81,7 @@ func TestStdcombined(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestPartialRead(t *testing.T) {
|
||||
res1, err := Runner("python3").
|
||||
res1, err := Runner("python").
|
||||
Arg("-c").
|
||||
Arg("import sys; import time; print(\"first message\", flush=True); time.sleep(5); print(\"cant see me\", flush=True);").
|
||||
Timeout(100 * time.Millisecond).
|
||||
@@ -106,7 +105,7 @@ func TestPartialRead(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestPartialReadStderr(t *testing.T) {
|
||||
res1, err := Runner("python3").
|
||||
res1, err := Runner("python").
|
||||
Arg("-c").
|
||||
Arg("import sys; import time; print(\"first message\", file=sys.stderr, flush=True); time.sleep(5); print(\"cant see me\", file=sys.stderr, flush=True);").
|
||||
Timeout(100 * time.Millisecond).
|
||||
@@ -131,7 +130,7 @@ func TestPartialReadStderr(t *testing.T) {
|
||||
|
||||
func TestReadUnflushedStdout(t *testing.T) {
|
||||
|
||||
res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stdout, end='')").Run()
|
||||
res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stdout, end='')").Run()
|
||||
if err != nil {
|
||||
t.Errorf("%v", err)
|
||||
}
|
||||
@@ -155,7 +154,7 @@ func TestReadUnflushedStdout(t *testing.T) {
|
||||
|
||||
func TestReadUnflushedStderr(t *testing.T) {
|
||||
|
||||
res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stderr, end='')").Run()
|
||||
res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stderr, end='')").Run()
|
||||
if err != nil {
|
||||
t.Errorf("%v", err)
|
||||
}
|
||||
@@ -180,7 +179,7 @@ func TestReadUnflushedStderr(t *testing.T) {
|
||||
func TestPartialReadUnflushed(t *testing.T) {
|
||||
t.SkipNow()
|
||||
|
||||
res1, err := Runner("python3").
|
||||
res1, err := Runner("python").
|
||||
Arg("-c").
|
||||
Arg("import sys; import time; print(\"first message\", end=''); time.sleep(5); print(\"cant see me\", end='');").
|
||||
Timeout(100 * time.Millisecond).
|
||||
@@ -206,7 +205,7 @@ func TestPartialReadUnflushed(t *testing.T) {
|
||||
func TestPartialReadUnflushedStderr(t *testing.T) {
|
||||
t.SkipNow()
|
||||
|
||||
res1, err := Runner("python3").
|
||||
res1, err := Runner("python").
|
||||
Arg("-c").
|
||||
Arg("import sys; import time; print(\"first message\", file=sys.stderr, end=''); time.sleep(5); print(\"cant see me\", file=sys.stderr, end='');").
|
||||
Timeout(100 * time.Millisecond).
|
||||
@@ -231,7 +230,7 @@ func TestPartialReadUnflushedStderr(t *testing.T) {
|
||||
|
||||
func TestListener(t *testing.T) {
|
||||
|
||||
res1, err := Runner("python3").
|
||||
res1, err := Runner("python").
|
||||
Arg("-c").
|
||||
Arg("import sys;" +
|
||||
"import time;" +
|
||||
@@ -264,7 +263,7 @@ func TestListener(t *testing.T) {
|
||||
|
||||
func TestLongStdout(t *testing.T) {
|
||||
|
||||
res1, err := Runner("python3").
|
||||
res1, err := Runner("python").
|
||||
Arg("-c").
|
||||
Arg("import sys; import time; print(\"X\" * 125001 + \"\\n\"); print(\"Y\" * 125001 + \"\\n\"); print(\"Z\" * 125001 + \"\\n\");").
|
||||
Timeout(5000 * time.Millisecond).
|
||||
@@ -290,40 +289,16 @@ func TestLongStdout(t *testing.T) {
|
||||
func TestFailOnTimeout(t *testing.T) {
|
||||
|
||||
_, err := Runner("sleep").Arg("2").Timeout(200 * time.Millisecond).FailOnTimeout().Run()
|
||||
if !errors.Is(err, ErrTimeout) {
|
||||
if err != ErrTimeout {
|
||||
t.Errorf("wrong err := %v", err)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestFailOnStderr(t *testing.T) {
|
||||
|
||||
res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").FailOnStderr().Run()
|
||||
if err == nil {
|
||||
t.Errorf("no err")
|
||||
}
|
||||
if res1.CommandTimedOut {
|
||||
t.Errorf("Timeout")
|
||||
}
|
||||
if res1.ExitCode != -1 {
|
||||
t.Errorf("res1.ExitCode == %v", res1.ExitCode)
|
||||
}
|
||||
if res1.StdErr != "error" {
|
||||
t.Errorf("res1.StdErr == '%v'", res1.StdErr)
|
||||
}
|
||||
if res1.StdOut != "" {
|
||||
t.Errorf("res1.StdOut == '%v'", res1.StdOut)
|
||||
}
|
||||
if res1.StdCombined != "error\n" {
|
||||
t.Errorf("res1.StdCombined == '%v'", res1.StdCombined)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestFailOnExitcode(t *testing.T) {
|
||||
|
||||
_, err := Runner("false").Timeout(200 * time.Millisecond).FailOnExitCode().Run()
|
||||
if !errors.Is(err, ErrExitCode) {
|
||||
if err != ErrExitCode {
|
||||
t.Errorf("wrong err := %v", err)
|
||||
}
|
||||
|
||||
|
@@ -32,8 +32,8 @@ func (pr *pipeReader) Read(listener []CommandListener) (string, string, string,
|
||||
stdout := ""
|
||||
go func() {
|
||||
buf := make([]byte, 128)
|
||||
for {
|
||||
n, err := pr.stdout.Read(buf)
|
||||
for true {
|
||||
n, out := pr.stdout.Read(buf)
|
||||
if n > 0 {
|
||||
txt := string(buf[:n])
|
||||
stdout += txt
|
||||
@@ -42,11 +42,11 @@ func (pr *pipeReader) Read(listener []CommandListener) (string, string, string,
|
||||
lstr.ReadRawStdout(buf[:n])
|
||||
}
|
||||
}
|
||||
if err == io.EOF {
|
||||
if out == io.EOF {
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
errch <- err
|
||||
if out != nil {
|
||||
errch <- out
|
||||
break
|
||||
}
|
||||
}
|
||||
@@ -61,7 +61,7 @@ func (pr *pipeReader) Read(listener []CommandListener) (string, string, string,
|
||||
stderr := ""
|
||||
go func() {
|
||||
buf := make([]byte, 128)
|
||||
for {
|
||||
for true {
|
||||
n, err := pr.stderr.Read(buf)
|
||||
|
||||
if n > 0 {
|
||||
|
@@ -41,12 +41,12 @@ func processEnvOverrides(rval reflect.Value, delim string, prefix string) error
|
||||
continue
|
||||
}
|
||||
|
||||
envkey, found := rsfield.Tag.Lookup("env")
|
||||
if !found || envkey == "-" {
|
||||
continue
|
||||
}
|
||||
if rvfield.Kind() == reflect.Struct {
|
||||
|
||||
if rvfield.Kind() == reflect.Struct && rvfield.Type() != reflect.TypeOf(time.UnixMilli(0)) {
|
||||
envkey, found := rsfield.Tag.Lookup("env")
|
||||
if !found || envkey == "-" {
|
||||
continue
|
||||
}
|
||||
|
||||
subPrefix := prefix
|
||||
if envkey != "" {
|
||||
@@ -57,7 +57,10 @@ func processEnvOverrides(rval reflect.Value, delim string, prefix string) error
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
envkey := rsfield.Tag.Get("env")
|
||||
if envkey == "" || envkey == "-" {
|
||||
continue
|
||||
}
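
For context, processEnvOverrides walks a struct via reflection and overrides fields from environment variables named by their `env` tag; nested structs (except time.Time, per the check above) are recursed into with their tag used as a prefix. A sketch of a struct it would understand; the delimiter "_" and the exported confext entry point are assumptions, so no call is shown:

```go
package config

// Assuming delim "_" and an empty prefix, these fields map to the
// environment variables DEBUG, SERVER_HOST and SERVER_PORT;
// `-` (or a missing tag) skips a field.
type ServerConfig struct {
	Host string `env:"HOST"`
	Port int    `env:"PORT"`
}

type Config struct {
	Debug  bool         `env:"DEBUG"`
	Server ServerConfig `env:"SERVER"`
	Secret string       `env:"-"` // never taken from the environment
}
```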
|
||||
|
||||
|
@@ -3,7 +3,6 @@ package cryptext
|
||||
import (
|
||||
"crypto/rand"
|
||||
"crypto/sha256"
|
||||
"crypto/sha512"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
@@ -15,15 +14,14 @@ import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
const LatestPassHashVersion = 5
|
||||
const LatestPassHashVersion = 4
|
||||
|
||||
// PassHash
|
||||
// - [v0]: plaintext password ( `0|...` ) // simple, used to write PW's directly in DB
|
||||
// - [v1]: sha256(plaintext) // simple hashing
|
||||
// - [v2]: seed | sha256<seed>(plaintext) // add seed
|
||||
// - [v3]: seed | sha256<seed>(plaintext) | [hex(totp)] // add TOTP support
|
||||
// - [v4]: bcrypt(plaintext) | [hex(totp)] // use proper bcrypt
|
||||
// - [v5]: bcrypt(sha512(plaintext)) | [hex(totp)] // hash pw before bcrypt (otherwise max pw-len = 72)
|
||||
// - [v0]: plaintext password ( `0|...` )
|
||||
// - [v1]: sha256(plaintext)
|
||||
// - [v2]: seed | sha256<seed>(plaintext)
|
||||
// - [v3]: seed | sha256<seed>(plaintext) | [hex(totp)]
|
||||
// - [v4]: bcrypt(plaintext) | [hex(totp)]
|
||||
type PassHash string
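
A short usage sketch of the PassHash API, matching what the cryptext tests later in this compare exercise (hash with the latest version, verify, and upgrade an outdated hash); the import path is an assumption:

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
)

func main() {
	// Hash with the latest version, without a TOTP secret.
	ph, err := cryptext.HashPassword("test123", nil)
	if err != nil {
		panic(err)
	}

	fmt.Println(ph.Valid())                // true
	fmt.Println(ph.Verify("test123", nil)) // true
	fmt.Println(ph.Verify("wrong", nil))   // false

	// Hashes created with an older version report that they should be re-hashed.
	old, _ := cryptext.HashPasswordV1("test123")
	if old.NeedsPasswordUpgrade() {
		old, _ = old.Upgrade("test123") // re-hash with the latest version
	}
	fmt.Println(old.Verify("test123", nil)) // true
}
```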
|
||||
|
||||
func (ph PassHash) Valid() bool {
|
||||
@@ -66,6 +64,7 @@ func (ph PassHash) Data() (_version int, _seed []byte, _payload []byte, _totp bo
|
||||
return int(version), nil, payload, false, nil, true
|
||||
}
|
||||
|
||||
//
|
||||
if version == 2 {
|
||||
if len(split) != 3 {
|
||||
return -1, nil, nil, false, nil, false
|
||||
@@ -110,21 +109,7 @@ func (ph PassHash) Data() (_version int, _seed []byte, _payload []byte, _totp bo
|
||||
totp := false
|
||||
totpsecret := make([]byte, 0)
|
||||
if split[2] != "0" {
|
||||
totpsecret, err = hex.DecodeString(split[2])
|
||||
totp = true
|
||||
}
|
||||
return int(version), nil, payload, totp, totpsecret, true
|
||||
}
|
||||
|
||||
if version == 5 {
|
||||
if len(split) != 3 {
|
||||
return -1, nil, nil, false, nil, false
|
||||
}
|
||||
payload := []byte(split[1])
|
||||
totp := false
|
||||
totpsecret := make([]byte, 0)
|
||||
if split[2] != "0" {
|
||||
totpsecret, err = hex.DecodeString(split[2])
|
||||
totpsecret, err = hex.DecodeString(split[3])
|
||||
totp = true
|
||||
}
|
||||
return int(version), nil, payload, totp, totpsecret, true
|
||||
@@ -171,14 +156,6 @@ func (ph PassHash) Verify(plainpass string, totp *string) bool {
|
||||
}
|
||||
}
|
||||
|
||||
if version == 5 {
|
||||
if !hastotp {
|
||||
return bcrypt.CompareHashAndPassword(payload, hash512(plainpass)) == nil
|
||||
} else {
|
||||
return bcrypt.CompareHashAndPassword(payload, hash512(plainpass)) == nil && totpext.Validate(totpsecret, *totp)
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -232,12 +209,6 @@ func (ph PassHash) ClearTOTP() (PassHash, error) {
|
||||
return PassHash(strings.Join(split, "|")), nil
|
||||
}
|
||||
|
||||
if version == 5 {
|
||||
split := strings.Split(string(ph), "|")
|
||||
split[2] = "0"
|
||||
return PassHash(strings.Join(split, "|")), nil
|
||||
}
|
||||
|
||||
return "", errors.New("unknown version")
|
||||
}
|
||||
|
||||
@@ -271,12 +242,6 @@ func (ph PassHash) WithTOTP(totpSecret []byte) (PassHash, error) {
|
||||
return PassHash(strings.Join(split, "|")), nil
|
||||
}
|
||||
|
||||
if version == 5 {
|
||||
split := strings.Split(string(ph), "|")
|
||||
split[2] = hex.EncodeToString(totpSecret)
|
||||
return PassHash(strings.Join(split, "|")), nil
|
||||
}
|
||||
|
||||
return "", errors.New("unknown version")
|
||||
}
|
||||
|
||||
@@ -306,10 +271,6 @@ func (ph PassHash) Change(newPlainPass string) (PassHash, error) {
|
||||
return HashPasswordV4(newPlainPass, langext.Conditional(hastotp, totpsecret, nil))
|
||||
}
|
||||
|
||||
if version == 5 {
|
||||
return HashPasswordV5(newPlainPass, langext.Conditional(hastotp, totpsecret, nil))
|
||||
}
|
||||
|
||||
return "", errors.New("unknown version")
|
||||
}
|
||||
|
||||
@@ -318,24 +279,7 @@ func (ph PassHash) String() string {
|
||||
}
|
||||
|
||||
func HashPassword(plainpass string, totpSecret []byte) (PassHash, error) {
|
||||
return HashPasswordV5(plainpass, totpSecret)
|
||||
}
|
||||
|
||||
func HashPasswordV5(plainpass string, totpSecret []byte) (PassHash, error) {
|
||||
var strtotp string
|
||||
|
||||
if totpSecret == nil {
|
||||
strtotp = "0"
|
||||
} else {
|
||||
strtotp = hex.EncodeToString(totpSecret)
|
||||
}
|
||||
|
||||
payload, err := bcrypt.GenerateFromPassword(hash512(plainpass), bcrypt.MinCost)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return PassHash(fmt.Sprintf("5|%s|%s", string(payload), strtotp)), nil
|
||||
return HashPasswordV4(plainpass, totpSecret)
|
||||
}
|
||||
|
||||
func HashPasswordV4(plainpass string, totpSecret []byte) (PassHash, error) {
|
||||
@@ -396,13 +340,6 @@ func HashPasswordV0(plainpass string) (PassHash, error) {
|
||||
return PassHash(fmt.Sprintf("0|%s", plainpass)), nil
|
||||
}
|
||||
|
||||
func hash512(s string) []byte {
|
||||
h := sha512.New()
|
||||
h.Write([]byte(s))
|
||||
bs := h.Sum(nil)
|
||||
return bs
|
||||
}
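
The v5 comment above ("otherwise max pw-len = 72") refers to bcrypt only consuming the first 72 bytes of its input; pre-hashing with SHA-512 yields a fixed 64-byte value, so arbitrarily long passwords contribute fully. A standalone sketch of that idea with golang.org/x/crypto/bcrypt:

```go
package main

import (
	"bytes"
	"crypto/sha512"
	"fmt"

	"golang.org/x/crypto/bcrypt"
)

func main() {
	longPassword := bytes.Repeat([]byte("a"), 200) // longer than bcrypt's 72-byte limit

	// Pre-hash: SHA-512 always yields 64 bytes, safely below the limit.
	sum := sha512.Sum512(longPassword)

	hash, err := bcrypt.GenerateFromPassword(sum[:], bcrypt.DefaultCost)
	if err != nil {
		panic(err)
	}

	// Verification applies the same pre-hash before comparing.
	fmt.Println(bcrypt.CompareHashAndPassword(hash, sum[:]) == nil) // true
}
```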
|
||||
|
||||
func hash256(s string) []byte {
|
||||
h := sha256.New()
|
||||
h.Write([]byte(s))
|
||||
|
@@ -1,210 +0,0 @@
|
||||
package cryptext
|
||||
|
||||
import (
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/totpext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/tst"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestPassHash1(t *testing.T) {
|
||||
ph, err := HashPassword("test123", nil)
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertFalse(t, ph.HasTOTP())
|
||||
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
}
|
||||
|
||||
func TestPassHashTOTP(t *testing.T) {
|
||||
sec, err := totpext.GenerateSecret()
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
ph, err := HashPassword("test123", sec)
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertTrue(t, ph.HasTOTP())
|
||||
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertFalse(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
}
|
||||
|
||||
func TestPassHashUpgrade_V0(t *testing.T) {
|
||||
ph, err := HashPasswordV0("test123")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertFalse(t, ph.HasTOTP())
|
||||
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
|
||||
ph, err = ph.Upgrade("test123")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertFalse(t, ph.HasTOTP())
|
||||
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
|
||||
}
|
||||
|
||||
func TestPassHashUpgrade_V1(t *testing.T) {
|
||||
ph, err := HashPasswordV1("test123")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertFalse(t, ph.HasTOTP())
|
||||
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
|
||||
ph, err = ph.Upgrade("test123")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertFalse(t, ph.HasTOTP())
|
||||
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
|
||||
}
|
||||
|
||||
func TestPassHashUpgrade_V2(t *testing.T) {
|
||||
ph, err := HashPasswordV2("test123")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertFalse(t, ph.HasTOTP())
|
||||
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
|
||||
ph, err = ph.Upgrade("test123")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertFalse(t, ph.HasTOTP())
|
||||
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
|
||||
}
|
||||
|
||||
func TestPassHashUpgrade_V3(t *testing.T) {
|
||||
ph, err := HashPasswordV3("test123", nil)
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertFalse(t, ph.HasTOTP())
|
||||
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
|
||||
ph, err = ph.Upgrade("test123")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertFalse(t, ph.HasTOTP())
|
||||
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
|
||||
}
|
||||
|
||||
func TestPassHashUpgrade_V3_TOTP(t *testing.T) {
|
||||
sec, err := totpext.GenerateSecret()
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
ph, err := HashPasswordV3("test123", sec)
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertTrue(t, ph.HasTOTP())
|
||||
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertFalse(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
|
||||
ph, err = ph.Upgrade("test123")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertTrue(t, ph.HasTOTP())
|
||||
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertFalse(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
}
|
||||
|
||||
func TestPassHashUpgrade_V4(t *testing.T) {
|
||||
ph, err := HashPasswordV4("test123", nil)
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertFalse(t, ph.HasTOTP())
|
||||
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
|
||||
ph, err = ph.Upgrade("test123")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertFalse(t, ph.HasTOTP())
|
||||
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
|
||||
}
|
||||
|
||||
func TestPassHashUpgrade_V4_TOTP(t *testing.T) {
|
||||
sec, err := totpext.GenerateSecret()
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
ph, err := HashPasswordV4("test123", sec)
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertTrue(t, ph.HasTOTP())
|
||||
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertFalse(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
|
||||
ph, err = ph.Upgrade("test123")
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
tst.AssertTrue(t, ph.Valid())
|
||||
tst.AssertTrue(t, ph.HasTOTP())
|
||||
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||
|
||||
tst.AssertFalse(t, ph.Verify("test123", nil))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
|
||||
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||
}
|
@@ -4,10 +4,6 @@ import (
|
||||
"go.mongodb.org/mongo-driver/mongo"
|
||||
)
|
||||
|
||||
type RawFilter interface {
|
||||
FilterQuery() mongo.Pipeline
|
||||
}
|
||||
|
||||
type Filter interface {
|
||||
FilterQuery() mongo.Pipeline
|
||||
Pagination() (string, SortDirection, string, SortDirection)
|
||||
|
@@ -7,9 +7,6 @@ type SyncSet[TData comparable] struct {
|
||||
lock sync.Mutex
|
||||
}
|
||||
|
||||
// Add adds `value` to the set
|
||||
// returns true if the value was actually inserted
|
||||
// returns false if the value already existed
|
||||
func (s *SyncSet[TData]) Add(value TData) bool {
|
||||
s.lock.Lock()
|
||||
defer s.lock.Unlock()
|
||||
@@ -18,10 +15,10 @@ func (s *SyncSet[TData]) Add(value TData) bool {
|
||||
s.data = make(map[TData]bool)
|
||||
}
|
||||
|
||||
_, existsInPreState := s.data[value]
|
||||
_, ok := s.data[value]
|
||||
s.data[value] = true
|
||||
|
||||
return !existsInPreState
|
||||
return !ok
|
||||
}
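
A small usage sketch of the Add semantics documented above (the first insert returns true, duplicates return false); the zero value is usable because Add lazily allocates the map, which is also how registeredTypes in exerr/consts.go further down uses it:

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/dataext"
)

func main() {
	seen := dataext.SyncSet[string]{}

	fmt.Println(seen.Add("a")) // true  - newly inserted
	fmt.Println(seen.Add("a")) // false - already in the set
	fmt.Println(seen.Add("b")) // true
}
```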
|
||||
|
||||
func (s *SyncSet[TData]) AddAll(values []TData) {
|
||||
|
dataext/tuple.go
@@ -1,170 +0,0 @@
|
||||
package dataext
|
||||
|
||||
type ValueGroup interface {
|
||||
TupleLength() int
|
||||
TupleValues() []any
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
type Single[T1 any] struct {
|
||||
V1 T1
|
||||
}
|
||||
|
||||
func (s Single[T1]) TupleLength() int {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (s Single[T1]) TupleValues() []any {
|
||||
return []any{s.V1}
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
type Tuple[T1 any, T2 any] struct {
|
||||
V1 T1
|
||||
V2 T2
|
||||
}
|
||||
|
||||
func (t Tuple[T1, T2]) TupleLength() int {
|
||||
return 2
|
||||
}
|
||||
|
||||
func (t Tuple[T1, T2]) TupleValues() []any {
|
||||
return []any{t.V1, t.V2}
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
type Triple[T1 any, T2 any, T3 any] struct {
|
||||
V1 T1
|
||||
V2 T2
|
||||
V3 T3
|
||||
}
|
||||
|
||||
func (t Triple[T1, T2, T3]) TupleLength() int {
|
||||
return 3
|
||||
}
|
||||
|
||||
func (t Triple[T1, T2, T3]) TupleValues() []any {
|
||||
return []any{t.V1, t.V2, t.V3}
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
type Quadruple[T1 any, T2 any, T3 any, T4 any] struct {
|
||||
V1 T1
|
||||
V2 T2
|
||||
V3 T3
|
||||
V4 T4
|
||||
}
|
||||
|
||||
func (t Quadruple[T1, T2, T3, T4]) TupleLength() int {
|
||||
return 4
|
||||
}
|
||||
|
||||
func (t Quadruple[T1, T2, T3, T4]) TupleValues() []any {
|
||||
return []any{t.V1, t.V2, t.V3, t.V4}
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
type Quintuple[T1 any, T2 any, T3 any, T4 any, T5 any] struct {
|
||||
V1 T1
|
||||
V2 T2
|
||||
V3 T3
|
||||
V4 T4
|
||||
V5 T5
|
||||
}
|
||||
|
||||
func (t Quintuple[T1, T2, T3, T4, T5]) TupleLength() int {
|
||||
return 5
|
||||
}
|
||||
|
||||
func (t Quintuple[T1, T2, T3, T4, T5]) TupleValues() []any {
|
||||
return []any{t.V1, t.V2, t.V3, t.V4, t.V5}
|
||||
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
type Sextuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any] struct {
|
||||
V1 T1
|
||||
V2 T2
|
||||
V3 T3
|
||||
V4 T4
|
||||
V5 T5
|
||||
V6 T6
|
||||
}
|
||||
|
||||
func (t Sextuple[T1, T2, T3, T4, T5, T6]) TupleLength() int {
|
||||
return 6
|
||||
}
|
||||
|
||||
func (t Sextuple[T1, T2, T3, T4, T5, T6]) TupleValues() []any {
|
||||
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6}
|
||||
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
type Septuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any] struct {
|
||||
V1 T1
|
||||
V2 T2
|
||||
V3 T3
|
||||
V4 T4
|
||||
V5 T5
|
||||
V6 T6
|
||||
V7 T7
|
||||
}
|
||||
|
||||
func (t Septuple[T1, T2, T3, T4, T5, T6, T7]) TupleLength() int {
|
||||
return 7
|
||||
}
|
||||
|
||||
func (t Septuple[T1, T2, T3, T4, T5, T6, T7]) TupleValues() []any {
|
||||
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7}
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
type Octuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any] struct {
|
||||
V1 T1
|
||||
V2 T2
|
||||
V3 T3
|
||||
V4 T4
|
||||
V5 T5
|
||||
V6 T6
|
||||
V7 T7
|
||||
V8 T8
|
||||
}
|
||||
|
||||
func (t Octuple[T1, T2, T3, T4, T5, T6, T7, T8]) TupleLength() int {
|
||||
return 8
|
||||
}
|
||||
|
||||
func (t Octuple[T1, T2, T3, T4, T5, T6, T7, T8]) TupleValues() []any {
|
||||
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8}
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
type Nonuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any] struct {
|
||||
V1 T1
|
||||
V2 T2
|
||||
V3 T3
|
||||
V4 T4
|
||||
V5 T5
|
||||
V6 T6
|
||||
V7 T7
|
||||
V8 T8
|
||||
V9 T9
|
||||
}
|
||||
|
||||
func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleLength() int {
|
||||
return 9
|
||||
}
|
||||
|
||||
func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleValues() []any {
|
||||
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8, t.V9}
|
||||
}
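
For reference (the file above is removed in this compare), this is how the tuple types were meant to be used; every size also satisfies the common ValueGroup interface:

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/dataext"
)

func main() {
	t := dataext.Tuple[string, int]{V1: "answer", V2: 42}
	fmt.Println(t.TupleLength()) // 2
	fmt.Println(t.TupleValues()) // [answer 42]

	// All sizes share the ValueGroup interface.
	var vg dataext.ValueGroup = dataext.Triple[int, int, int]{V1: 1, V2: 2, V3: 3}
	fmt.Println(vg.TupleValues()) // [1 2 3]
}
```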
|
@@ -1,31 +0,0 @@
|
||||
package enums
|
||||
|
||||
type Enum interface {
|
||||
Valid() bool
|
||||
ValuesAny() []any
|
||||
ValuesMeta() []EnumMetaValue
|
||||
VarName() string
|
||||
}
|
||||
|
||||
type StringEnum interface {
|
||||
Enum
|
||||
String() string
|
||||
}
|
||||
|
||||
type DescriptionEnum interface {
|
||||
Enum
|
||||
Description() string
|
||||
DescriptionMeta() EnumDescriptionMetaValue
|
||||
}
|
||||
|
||||
type EnumMetaValue struct {
|
||||
VarName string `json:"varName"`
|
||||
Value Enum `json:"value"`
|
||||
Description *string `json:"description"`
|
||||
}
|
||||
|
||||
type EnumDescriptionMetaValue struct {
|
||||
VarName string `json:"varName"`
|
||||
Value Enum `json:"value"`
|
||||
Description string `json:"description"`
|
||||
}
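
The interfaces above define the contract an enum type has to fulfil. A hand-written sketch of a StringEnum implementation (such boilerplate is often generated; this example only illustrates the methods):

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/enums"
)

type Color string

const (
	ColorRed  Color = "RED"
	ColorBlue Color = "BLUE"
)

func (c Color) Valid() bool      { return c == ColorRed || c == ColorBlue }
func (c Color) ValuesAny() []any { return []any{ColorRed, ColorBlue} }
func (c Color) String() string   { return string(c) }

func (c Color) ValuesMeta() []enums.EnumMetaValue {
	return []enums.EnumMetaValue{
		{VarName: "ColorRed", Value: ColorRed, Description: nil},
		{VarName: "ColorBlue", Value: ColorBlue, Description: nil},
	}
}

func (c Color) VarName() string {
	switch c {
	case ColorRed:
		return "ColorRed"
	case ColorBlue:
		return "ColorBlue"
	default:
		return ""
	}
}

func main() {
	var e enums.StringEnum = ColorRed
	fmt.Println(e.Valid(), e.String(), e.VarName()) // true RED ColorRed
}
```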
|
exerr/builder.go
@@ -1,480 +0,0 @@
|
||||
package exerr
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/rs/zerolog"
|
||||
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/enums"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"net/http"
|
||||
"os"
|
||||
"runtime/debug"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
//
|
||||
// ==== USAGE =====
|
||||
//
|
||||
// If some method returns an error, always wrap it into an exerror:
|
||||
// value, err := do_something(..)
|
||||
// if err != nil {
|
||||
// return nil, exerror.Wrap(err, "do something failed").Build()
|
||||
// }
|
||||
//
|
||||
// If possible, add metadata to the error (e.g. the id that was not found, ...); the methods are the same as in zerolog
|
||||
// return nil, exerror.Wrap(err, "do something failed").Str("someid", id).Int("count", in.Count).Build()
|
||||
//
|
||||
// You can change the error category with `.User()` and `.System()` (User-errors default to 400 and System-errors to 500)
|
||||
// You can also manually set the statuscode with `.WithStatuscode(http.NotFound)`
|
||||
// You can set the type with `WithType(..)`
|
||||
//
|
||||
// New Errors (that don't wrap an existing err object) are created with New
|
||||
// return nil, exerror.New(exerror.TypeInternal, "something went horribly wrong").Build()
|
||||
// You can either use an existing ErrorType, the "catch-all" TypeInternal, or add your own ErrorType in consts.go
|
||||
//
|
||||
// All errors should be handled one of the following four ways:
|
||||
// - return the error to the caller and let him handle it:
|
||||
// (also auto-prints the error to the log)
|
||||
// => Wrap/New + Build
|
||||
// - Print the error
|
||||
// (also auto-sends it to the error-service)
|
||||
// This is useful for errors that happen asynchronously or are non-fatal for the current request
|
||||
// => Wrap/New + Print
|
||||
// - Return the error to the Rest-API caller
|
||||
// (also auto-prints the error to the log)
|
||||
// (also auto-sends it to the error-service)
|
||||
// => Wrap/New + Output
|
||||
// - Print and stop the service
|
||||
// (also auto-sends it to the error-service)
|
||||
// => Wrap/New + Fatal
|
||||
//
|
||||
|
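
A compact sketch of the wrapping pattern the usage notes above describe (wrap, attach metadata, build); readConfig and the metadata key are placeholders for this example:

```go
package main

import (
	"fmt"
	"os"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

func main() {
	exerr.Init(exerr.ErrorPackageConfigInit{}) // once, at program start

	readConfig := func(path string) error {
		_, err := os.ReadFile(path)
		return err
	}

	if err := readConfig("/does/not/exist.json"); err != nil {
		// Wrap the lower-level error, attach metadata, and build it.
		wrapped := exerr.Wrap(err, "reading config failed").
			Str("config.path", "/does/not/exist.json").
			Build()

		fmt.Println(wrapped)
	}
}
```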
||||
var stackSkipLogger zerolog.Logger
|
||||
|
||||
func init() {
|
||||
cw := zerolog.ConsoleWriter{
|
||||
Out: os.Stdout,
|
||||
TimeFormat: "2006-01-02 15:04:05 Z07:00",
|
||||
}
|
||||
|
||||
multi := zerolog.MultiLevelWriter(cw)
|
||||
stackSkipLogger = zerolog.New(multi).With().Timestamp().CallerWithSkipFrameCount(4).Logger()
|
||||
}
|
||||
|
||||
type Builder struct {
|
||||
errorData *ExErr
|
||||
containsGinData bool
|
||||
noLog bool
|
||||
}
|
||||
|
||||
func Get(err error) *Builder {
|
||||
return &Builder{errorData: FromError(err)}
|
||||
}
|
||||
|
||||
func New(t ErrorType, msg string) *Builder {
|
||||
return &Builder{errorData: newExErr(CatSystem, t, msg)}
|
||||
}
|
||||
|
||||
func Wrap(err error, msg string) *Builder {
|
||||
if err == nil {
|
||||
return &Builder{errorData: newExErr(CatSystem, TypeInternal, msg)} // prevent NPE if we call Wrap with err==nil
|
||||
}
|
||||
|
||||
if !pkgconfig.RecursiveErrors {
|
||||
v := FromError(err)
|
||||
v.Message = msg
|
||||
return &Builder{errorData: v}
|
||||
}
|
||||
return &Builder{errorData: wrapExErr(FromError(err), msg, CatWrap, 1)}
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
func (b *Builder) WithType(t ErrorType) *Builder {
|
||||
b.errorData.Type = t
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *Builder) WithStatuscode(status int) *Builder {
|
||||
b.errorData.StatusCode = &status
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *Builder) WithMessage(msg string) *Builder {
|
||||
b.errorData.Message = msg
|
||||
return b
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
// Err changes the Severity to ERROR (default)
|
||||
// The error will be:
|
||||
//
|
||||
// - On Build():
|
||||
//
|
||||
// - Short-Logged as Err
|
||||
//
|
||||
// - On Print():
|
||||
//
|
||||
// - Logged as Err
|
||||
//
|
||||
// - Send to the error-service
|
||||
//
|
||||
// - On Output():
|
||||
//
|
||||
// - Logged as Err
|
||||
//
|
||||
// - Send to the error-service
|
||||
func (b *Builder) Err() *Builder {
|
||||
b.errorData.Severity = SevErr
|
||||
return b
|
||||
}
|
||||
|
||||
// Warn changes the Severity to WARN
|
||||
// The error will be:
|
||||
//
|
||||
// - On Build():
|
||||
//
|
||||
// - -(nothing)-
|
||||
//
|
||||
// - On Print():
|
||||
//
|
||||
// - Short-Logged as Warn
|
||||
//
|
||||
// - On Output():
|
||||
//
|
||||
// - Logged as Warn
|
||||
func (b *Builder) Warn() *Builder {
|
||||
b.errorData.Severity = SevWarn
|
||||
return b
|
||||
}
|
||||
|
||||
// Info changes the Severity to INFO
|
||||
// The error will be:
|
||||
//
|
||||
// - On Build():
|
||||
//
|
||||
// - -(nothing)-
|
||||
//
|
||||
// - On Print():
|
||||
//
|
||||
// - -(nothing)-
|
||||
//
|
||||
// - On Output():
|
||||
//
|
||||
// - -(nothing)-
|
||||
func (b *Builder) Info() *Builder {
|
||||
b.errorData.Severity = SevInfo
|
||||
return b
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
// User sets the Category to CatUser
|
||||
//
|
||||
// Errors with category CatUser are treated as client errors (default statuscode 400)
|
||||
func (b *Builder) User() *Builder {
|
||||
b.errorData.Category = CatUser
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *Builder) System() *Builder {
|
||||
b.errorData.Category = CatSystem
|
||||
return b
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
func (b *Builder) NoLog() *Builder {
|
||||
b.noLog = true
|
||||
return b
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
func (b *Builder) Id(key string, val fmt.Stringer) *Builder {
|
||||
return b.addMeta(key, MDTID, newIDWrap(val))
|
||||
}
|
||||
|
||||
func (b *Builder) StrPtr(key string, val *string) *Builder {
|
||||
return b.addMeta(key, MDTStringPtr, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Str(key string, val string) *Builder {
|
||||
return b.addMeta(key, MDTString, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Int(key string, val int) *Builder {
|
||||
return b.addMeta(key, MDTInt, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Int8(key string, val int8) *Builder {
|
||||
return b.addMeta(key, MDTInt8, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Int16(key string, val int16) *Builder {
|
||||
return b.addMeta(key, MDTInt16, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Int32(key string, val int32) *Builder {
|
||||
return b.addMeta(key, MDTInt32, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Int64(key string, val int64) *Builder {
|
||||
return b.addMeta(key, MDTInt64, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Float32(key string, val float32) *Builder {
|
||||
return b.addMeta(key, MDTFloat32, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Float64(key string, val float64) *Builder {
|
||||
return b.addMeta(key, MDTFloat64, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Bool(key string, val bool) *Builder {
|
||||
return b.addMeta(key, MDTBool, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Bytes(key string, val []byte) *Builder {
|
||||
return b.addMeta(key, MDTBytes, val)
|
||||
}
|
||||
|
||||
func (b *Builder) ObjectID(key string, val primitive.ObjectID) *Builder {
|
||||
return b.addMeta(key, MDTObjectID, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Time(key string, val time.Time) *Builder {
|
||||
return b.addMeta(key, MDTTime, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Dur(key string, val time.Duration) *Builder {
|
||||
return b.addMeta(key, MDTDuration, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Strs(key string, val []string) *Builder {
|
||||
return b.addMeta(key, MDTStringArray, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Ints(key string, val []int) *Builder {
|
||||
return b.addMeta(key, MDTIntArray, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Ints32(key string, val []int32) *Builder {
|
||||
return b.addMeta(key, MDTInt32Array, val)
|
||||
}
|
||||
|
||||
func (b *Builder) Type(key string, cls interface{}) *Builder {
|
||||
return b.addMeta(key, MDTString, fmt.Sprintf("%T", cls))
|
||||
}
|
||||
|
||||
func (b *Builder) Interface(key string, val interface{}) *Builder {
|
||||
return b.addMeta(key, MDTAny, newAnyWrap(val))
|
||||
}
|
||||
|
||||
func (b *Builder) Any(key string, val any) *Builder {
|
||||
return b.addMeta(key, MDTAny, newAnyWrap(val))
|
||||
}
|
||||
|
||||
func (b *Builder) Stringer(key string, val fmt.Stringer) *Builder {
|
||||
if langext.IsNil(val) {
|
||||
return b.addMeta(key, MDTString, "(!nil)")
|
||||
} else {
|
||||
return b.addMeta(key, MDTString, val.String())
|
||||
}
|
||||
}
|
||||
|
||||
func (b *Builder) Enum(key string, val enums.Enum) *Builder {
|
||||
return b.addMeta(key, MDTEnum, newEnumWrap(val))
|
||||
}
|
||||
|
||||
func (b *Builder) Stack() *Builder {
|
||||
return b.addMeta("@Stack", MDTString, string(debug.Stack()))
|
||||
}
|
||||
|
||||
func (b *Builder) Errs(key string, val []error) *Builder {
|
||||
for i, valerr := range val {
|
||||
b.addMeta(fmt.Sprintf("%v[%v]", key, i), MDTString, Get(valerr).errorData.FormatLog(LogPrintFull))
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) *Builder {
|
||||
if v := ctx.Value("start_timestamp"); v != nil {
|
||||
if t, ok := v.(time.Time); ok {
|
||||
b.Time("ctx.startTimestamp", t)
|
||||
b.Time("ctx.endTimestamp", time.Now())
|
||||
}
|
||||
}
|
||||
b.Str("gin.method", req.Method)
|
||||
b.Str("gin.path", g.FullPath())
|
||||
b.Strs("gin.header", extractHeader(g.Request.Header))
|
||||
if req.URL != nil {
|
||||
b.Str("gin.url", req.URL.String())
|
||||
}
|
||||
if ctxVal := g.GetString("apiversion"); ctxVal != "" {
|
||||
b.Str("gin.context.apiversion", ctxVal)
|
||||
}
|
||||
if ctxVal := g.GetString("uid"); ctxVal != "" {
|
||||
b.Str("gin.context.uid", ctxVal)
|
||||
}
|
||||
if ctxVal := g.GetString("fcmId"); ctxVal != "" {
|
||||
b.Str("gin.context.fcmid", ctxVal)
|
||||
}
|
||||
if ctxVal := g.GetString("reqid"); ctxVal != "" {
|
||||
b.Str("gin.context.reqid", ctxVal)
|
||||
}
|
||||
if req.Method != "GET" && req.Body != nil {
|
||||
|
||||
if req.Header.Get("Content-Type") == "application/json" {
|
||||
if brc, ok := req.Body.(dataext.BufferedReadCloser); ok {
|
||||
if bin, err := brc.BufferedAll(); err == nil {
|
||||
if len(bin) < 16*1024 {
|
||||
var prettyJSON bytes.Buffer
|
||||
err = json.Indent(&prettyJSON, bin, "", " ")
|
||||
if err == nil {
|
||||
b.Str("gin.body", string(prettyJSON.Bytes()))
|
||||
} else {
|
||||
b.Bytes("gin.body", bin)
|
||||
}
|
||||
} else {
|
||||
b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if req.Header.Get("Content-Type") == "multipart/form-data" || req.Header.Get("Content-Type") == "x-www-form-urlencoded" {
|
||||
if brc, ok := req.Body.(dataext.BufferedReadCloser); ok {
|
||||
if bin, err := brc.BufferedAll(); err == nil {
|
||||
if len(bin) < 16*1024 {
|
||||
b.Bytes("gin.body", bin)
|
||||
} else {
|
||||
b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
b.containsGinData = true
|
||||
return b
|
||||
}
|
||||
|
||||
func formatHeader(header map[string][]string) string {
|
||||
ml := 1
|
||||
for k, _ := range header {
|
||||
if len(k) > ml {
|
||||
ml = len(k)
|
||||
}
|
||||
}
|
||||
r := ""
|
||||
for k, v := range header {
|
||||
if r != "" {
|
||||
r += "\n"
|
||||
}
|
||||
for _, hval := range v {
|
||||
value := hval
|
||||
value = strings.ReplaceAll(value, "\n", "\\n")
|
||||
value = strings.ReplaceAll(value, "\r", "\\r")
|
||||
value = strings.ReplaceAll(value, "\t", "\\t")
|
||||
r += langext.StrPadRight(k, " ", ml) + " := " + value
|
||||
}
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
func extractHeader(header map[string][]string) []string {
|
||||
r := make([]string, 0, len(header))
|
||||
for k, v := range header {
|
||||
for _, hval := range v {
|
||||
value := hval
|
||||
value = strings.ReplaceAll(value, "\n", "\\n")
|
||||
value = strings.ReplaceAll(value, "\r", "\\r")
|
||||
value = strings.ReplaceAll(value, "\t", "\\t")
|
||||
r = append(r, k+": "+value)
|
||||
}
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
// Build creates a new error, ready to pass up the stack
|
||||
// If the error is not SevWarn or SevInfo it is also logged (in short form, without stacktrace) to stdout
|
||||
// Can be globally configured with ZeroLogErrTraces and ZeroLogAllTraces
|
||||
// Can be locally suppressed with Builder.NoLog()
|
||||
func (b *Builder) Build() error {
|
||||
warnOnPkgConfigNotInitialized()
|
||||
|
||||
if pkgconfig.ZeroLogErrTraces && !b.noLog && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) {
|
||||
b.errorData.ShortLog(stackSkipLogger.Error())
|
||||
} else if pkgconfig.ZeroLogAllTraces && !b.noLog {
|
||||
b.errorData.ShortLog(stackSkipLogger.Error())
|
||||
}
|
||||
|
||||
b.CallListener(MethodBuild)
|
||||
|
||||
return b.errorData
|
||||
}
|
||||
|
||||
// Output writes the error to the gin response.
|
||||
// The error also gets printed to stdout/stderr
|
||||
// If the error is SevErr|SevFatal we also send it to the error-service
|
||||
func (b *Builder) Output(ctx context.Context, g *gin.Context) {
|
||||
if !b.containsGinData && g.Request != nil {
|
||||
// Auto-Add gin metadata if the caller hasn't already done it
|
||||
b.GinReq(ctx, g, g.Request)
|
||||
}
|
||||
|
||||
b.errorData.Output(g)
|
||||
|
||||
if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal {
|
||||
b.errorData.Log(stackSkipLogger.Error())
|
||||
} else if b.errorData.Severity == SevWarn {
|
||||
b.errorData.Log(stackSkipLogger.Warn())
|
||||
}
|
||||
|
||||
b.CallListener(MethodOutput)
|
||||
}
|
||||
|
||||
// Print prints the error
|
||||
// If the error is SevErr we also send it to the error-service
|
||||
func (b *Builder) Print() {
|
||||
if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal {
|
||||
b.errorData.Log(stackSkipLogger.Error())
|
||||
} else if b.errorData.Severity == SevWarn {
|
||||
b.errorData.ShortLog(stackSkipLogger.Warn())
|
||||
}
|
||||
|
||||
b.CallListener(MethodPrint)
|
||||
}
|
||||
|
||||
func (b *Builder) Format(level LogPrintLevel) string {
|
||||
return b.errorData.FormatLog(level)
|
||||
}
|
||||
|
||||
// Fatal prints the error and terminates the program
|
||||
// If the error is SevErr we also send it to the error-service
|
||||
func (b *Builder) Fatal() {
|
||||
b.errorData.Severity = SevFatal
|
||||
b.errorData.Log(stackSkipLogger.WithLevel(zerolog.FatalLevel))
|
||||
|
||||
b.CallListener(MethodFatal)
|
||||
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
func (b *Builder) addMeta(key string, mdtype metaDataType, val interface{}) *Builder {
|
||||
b.errorData.Meta.add(key, mdtype, val)
|
||||
return b
|
||||
}
|
@@ -1,204 +0,0 @@
|
||||
package exerr
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"reflect"
|
||||
"time"
|
||||
)
|
||||
|
||||
var reflectTypeStr = reflect.TypeOf("")
|
||||
|
||||
func FromError(err error) *ExErr {
|
||||
if verr, ok := err.(*ExErr); ok {
|
||||
// A simple ExErr
|
||||
return verr
|
||||
}
|
||||
|
||||
// A foreign error (eg a MongoDB exception)
|
||||
return &ExErr{
|
||||
UniqueID: newID(),
|
||||
Category: CatForeign,
|
||||
Type: TypeInternal,
|
||||
Severity: SevErr,
|
||||
Timestamp: time.Time{},
|
||||
StatusCode: nil,
|
||||
Message: err.Error(),
|
||||
WrappedErrType: fmt.Sprintf("%T", err),
|
||||
WrappedErr: err,
|
||||
Caller: "",
|
||||
OriginalError: nil,
|
||||
Meta: getForeignMeta(err),
|
||||
}
|
||||
}
|
||||
|
||||
func newExErr(cat ErrorCategory, errtype ErrorType, msg string) *ExErr {
|
||||
return &ExErr{
|
||||
UniqueID: newID(),
|
||||
Category: cat,
|
||||
Type: errtype,
|
||||
Severity: SevErr,
|
||||
Timestamp: time.Now(),
|
||||
StatusCode: nil,
|
||||
Message: msg,
|
||||
WrappedErrType: "",
|
||||
WrappedErr: nil,
|
||||
Caller: callername(2),
|
||||
OriginalError: nil,
|
||||
Meta: make(map[string]MetaValue),
|
||||
}
|
||||
}
|
||||
|
||||
func wrapExErr(e *ExErr, msg string, cat ErrorCategory, stacktraceskip int) *ExErr {
|
||||
return &ExErr{
|
||||
UniqueID: newID(),
|
||||
Category: cat,
|
||||
Type: TypeWrap,
|
||||
Severity: SevErr,
|
||||
Timestamp: time.Now(),
|
||||
StatusCode: e.StatusCode,
|
||||
Message: msg,
|
||||
WrappedErrType: "",
|
||||
WrappedErr: nil,
|
||||
Caller: callername(1 + stacktraceskip),
|
||||
OriginalError: e,
|
||||
Meta: make(map[string]MetaValue),
|
||||
}
|
||||
}
|
||||
|
||||
func getForeignMeta(err error) (mm MetaMap) {
|
||||
mm = make(map[string]MetaValue)
|
||||
|
||||
defer func() {
|
||||
if panicerr := recover(); panicerr != nil {
|
||||
New(TypePanic, "Panic while trying to get foreign meta").
|
||||
Str("source", err.Error()).
|
||||
Interface("panic-object", panicerr).
|
||||
Stack().
|
||||
Print()
|
||||
}
|
||||
}()
|
||||
|
||||
rval := reflect.ValueOf(err)
|
||||
if rval.Kind() == reflect.Interface || rval.Kind() == reflect.Ptr {
|
||||
rval = reflect.ValueOf(err).Elem()
|
||||
}
|
||||
|
||||
mm.add("foreign.errortype", MDTString, rval.Type().String())
|
||||
|
||||
for k, v := range addMetaPrefix("foreign", getReflectedMetaValues(err, 8)) {
|
||||
mm[k] = v
|
||||
}
|
||||
|
||||
return mm
|
||||
}
|
||||
|
||||
func getReflectedMetaValues(value interface{}, remainingDepth int) map[string]MetaValue {
|
||||
|
||||
if remainingDepth <= 0 {
|
||||
return map[string]MetaValue{}
|
||||
}
|
||||
|
||||
if langext.IsNil(value) {
|
||||
return map[string]MetaValue{"": {DataType: MDTNil, Value: nil}}
|
||||
}
|
||||
|
||||
rval := reflect.ValueOf(value)
|
||||
|
||||
if rval.Type().Kind() == reflect.Ptr {
|
||||
|
||||
if rval.IsNil() {
|
||||
return map[string]MetaValue{"*": {DataType: MDTNil, Value: nil}}
|
||||
}
|
||||
|
||||
elem := rval.Elem()
|
||||
|
||||
return addMetaPrefix("*", getReflectedMetaValues(elem.Interface(), remainingDepth-1))
|
||||
}
|
||||
|
||||
if !rval.CanInterface() {
|
||||
return map[string]MetaValue{"": {DataType: MDTString, Value: "<<no-interface>>"}}
|
||||
}
|
||||
|
||||
raw := rval.Interface()
|
||||
|
||||
switch ifraw := raw.(type) {
|
||||
case time.Time:
|
||||
return map[string]MetaValue{"": {DataType: MDTTime, Value: ifraw}}
|
||||
case time.Duration:
|
||||
return map[string]MetaValue{"": {DataType: MDTDuration, Value: ifraw}}
|
||||
case int:
|
||||
return map[string]MetaValue{"": {DataType: MDTInt, Value: ifraw}}
|
||||
case int8:
|
||||
return map[string]MetaValue{"": {DataType: MDTInt8, Value: ifraw}}
|
||||
case int16:
|
||||
return map[string]MetaValue{"": {DataType: MDTInt16, Value: ifraw}}
|
||||
case int32:
|
||||
return map[string]MetaValue{"": {DataType: MDTInt32, Value: ifraw}}
|
||||
case int64:
|
||||
return map[string]MetaValue{"": {DataType: MDTInt64, Value: ifraw}}
|
||||
case string:
|
||||
return map[string]MetaValue{"": {DataType: MDTString, Value: ifraw}}
|
||||
case bool:
|
||||
return map[string]MetaValue{"": {DataType: MDTBool, Value: ifraw}}
|
||||
case []byte:
|
||||
return map[string]MetaValue{"": {DataType: MDTBytes, Value: ifraw}}
|
||||
case float32:
|
||||
return map[string]MetaValue{"": {DataType: MDTFloat32, Value: ifraw}}
|
||||
case float64:
|
||||
return map[string]MetaValue{"": {DataType: MDTFloat64, Value: ifraw}}
|
||||
case []int:
|
||||
return map[string]MetaValue{"": {DataType: MDTIntArray, Value: ifraw}}
|
||||
case []int32:
|
||||
return map[string]MetaValue{"": {DataType: MDTInt32Array, Value: ifraw}}
|
||||
case primitive.ObjectID:
|
||||
return map[string]MetaValue{"": {DataType: MDTObjectID, Value: ifraw}}
|
||||
case []string:
|
||||
return map[string]MetaValue{"": {DataType: MDTStringArray, Value: ifraw}}
|
||||
}
|
||||
|
||||
if rval.Type().Kind() == reflect.Struct {
|
||||
m := make(map[string]MetaValue)
|
||||
for i := 0; i < rval.NumField(); i++ {
|
||||
fieldtype := rval.Type().Field(i)
|
||||
|
||||
fieldname := fieldtype.Name
|
||||
|
||||
if fieldtype.IsExported() {
|
||||
for k, v := range addMetaPrefix(fieldname, getReflectedMetaValues(rval.Field(i).Interface(), remainingDepth-1)) {
|
||||
m[k] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
if rval.Type().ConvertibleTo(reflectTypeStr) {
|
||||
return map[string]MetaValue{"": {DataType: MDTString, Value: rval.Convert(reflectTypeStr).String()}}
|
||||
}
|
||||
|
||||
jsonval, err := json.Marshal(value)
|
||||
if err != nil {
|
||||
panic(err) // gets recovered later up
|
||||
}
|
||||
|
||||
return map[string]MetaValue{"": {DataType: MDTString, Value: string(jsonval)}}
|
||||
}
|
||||
|
||||
func addMetaPrefix(prefix string, m map[string]MetaValue) map[string]MetaValue {
|
||||
if len(m) == 1 {
|
||||
for k, v := range m {
|
||||
if k == "" {
|
||||
return map[string]MetaValue{prefix: v}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
r := make(map[string]MetaValue, len(m))
|
||||
for k, v := range m {
|
||||
r[prefix+"."+k] = v
|
||||
}
|
||||
return r
|
||||
}
|
@@ -1,88 +0,0 @@
|
||||
package exerr
|
||||
|
||||
import (
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
)
|
||||
|
||||
type ErrorCategory struct{ Category string }
|
||||
|
||||
var (
|
||||
CatWrap = ErrorCategory{"Wrap"} // The error is simply wrapping another error (e.g. when a grpc call returns an error)
|
||||
CatSystem = ErrorCategory{"System"} // An internal system error (e.g. connection to db failed)
|
||||
CatUser = ErrorCategory{"User"} // The user (the API caller) did something wrong (e.g. he has no permissions to do this)
|
||||
CatForeign = ErrorCategory{"Foreign"} // A foreign error that some component threw (e.g. an unknown mongodb error); happens if we call Wrap(..) on a value that is not an ExErr
|
||||
)
|
||||
|
||||
//goland:noinspection GoUnusedGlobalVariable
|
||||
var AllCategories = []ErrorCategory{CatWrap, CatSystem, CatUser, CatForeign}
|
||||
|
||||
type ErrorSeverity struct{ Severity string }
|
||||
|
||||
var (
|
||||
SevTrace = ErrorSeverity{"Trace"}
|
||||
SevDebug = ErrorSeverity{"Debug"}
|
||||
SevInfo = ErrorSeverity{"Info"}
|
||||
SevWarn = ErrorSeverity{"Warn"}
|
||||
SevErr = ErrorSeverity{"Err"}
|
||||
SevFatal = ErrorSeverity{"Fatal"}
|
||||
)
|
||||
|
||||
//goland:noinspection GoUnusedGlobalVariable
|
||||
var AllSeverities = []ErrorSeverity{SevTrace, SevDebug, SevInfo, SevWarn, SevErr, SevFatal}
|
||||
|
||||
type ErrorType struct {
|
||||
Key string
|
||||
DefaultStatusCode *int
|
||||
}
|
||||
|
||||
//goland:noinspection GoUnusedGlobalVariable
|
||||
var (
|
||||
TypeInternal = NewType("INTERNAL_ERROR", langext.Ptr(500))
|
||||
TypePanic = NewType("PANIC", langext.Ptr(500))
|
||||
TypeNotImplemented = NewType("NOT_IMPLEMENTED", langext.Ptr(500))
|
||||
|
||||
TypeMongoQuery = NewType("MONGO_QUERY", langext.Ptr(500))
|
||||
TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500))
|
||||
TypeMongoFilter = NewType("MONGO_FILTER", langext.Ptr(500))
|
||||
TypeMongoReflection = NewType("MONGO_REFLECTION", langext.Ptr(500))
|
||||
TypeMongoInvalidOpt = NewType("MONGO_INVALIDOPT", langext.Ptr(500))
|
||||
|
||||
TypeWrap = NewType("Wrap", nil)
|
||||
|
||||
TypeBindFailURI = NewType("BINDFAIL_URI", langext.Ptr(400))
|
||||
TypeBindFailQuery = NewType("BINDFAIL_QUERY", langext.Ptr(400))
|
||||
TypeBindFailJSON = NewType("BINDFAIL_JSON", langext.Ptr(400))
|
||||
TypeBindFailFormData = NewType("BINDFAIL_FORMDATA", langext.Ptr(400))
|
||||
TypeBindFailHeader = NewType("BINDFAIL_HEADER", langext.Ptr(400))
|
||||
|
||||
TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400))
|
||||
TypeInvalidCSID = NewType("INVALID_CSID", langext.Ptr(400))
|
||||
|
||||
TypeGoogleStatuscode = NewType("GOOGLE_STATUSCODE", langext.Ptr(400))
|
||||
TypeGoogleResponse = NewType("GOOGLE_RESPONSE", langext.Ptr(400))
|
||||
|
||||
TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401))
|
||||
TypeAuthFailed = NewType("AUTH_FAILED", langext.Ptr(401))
|
||||
|
||||
// other values are defined by the packages that use exerr
|
||||
)
|
||||
|
||||
var registeredTypes = dataext.SyncSet[string]{}
|
||||
|
||||
func NewType(key string, defStatusCode *int) ErrorType {
|
||||
insertOkay := registeredTypes.Add(key)
|
||||
if !insertOkay {
|
||||
panic("Cannot register same ErrType ('" + key + "') more than once")
|
||||
}
|
||||
|
||||
return ErrorType{key, defStatusCode}
|
||||
}
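
Packages built on top of exerr register their own error types the same way the built-ins above are created; the keys and status codes below are examples, not types defined by goext. Registering the same key twice panics via the SyncSet check:

```go
package myapi

import (
	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

// Registered once at package-init time.
var (
	TypeUserNotFound = exerr.NewType("USER_NOT_FOUND", langext.Ptr(404))
	TypeQuotaReached = exerr.NewType("QUOTA_REACHED", langext.Ptr(429))
)
```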
|
||||
|
||||
type LogPrintLevel string
|
||||
|
||||
const (
|
||||
LogPrintFull LogPrintLevel = "Full"
|
||||
LogPrintOverview LogPrintLevel = "Overview"
|
||||
LogPrintShort LogPrintLevel = "Short"
|
||||
)
|
@@ -1,84 +0,0 @@
|
||||
package exerr
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
)
|
||||
|
||||
type ErrorPackageConfig struct {
|
||||
ZeroLogErrTraces       bool // automatically print zerolog logs on .Build() (for SevErr and SevFatal)
|
||||
ZeroLogAllTraces       bool // automatically print zerolog logs on .Build() (for all severities)
|
||||
RecursiveErrors bool // errors contains their Origin-Error
|
||||
ExtendedGinOutput bool // Log extended data (trace, meta, ...) to gin in err.Output()
|
||||
IncludeMetaInGinOutput bool // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output()
|
||||
ExtendGinOutput func(err *ExErr, json map[string]any) // (Optionally) extend the gin output with more fields
|
||||
ExtendGinDataOutput func(err *ExErr, depth int, json map[string]any) // (Optionally) extend the gin `__data` output with more fields
|
||||
}
|
||||
|
||||
type ErrorPackageConfigInit struct {
|
||||
ZeroLogErrTraces *bool
|
||||
ZeroLogAllTraces *bool
|
||||
RecursiveErrors *bool
|
||||
ExtendedGinOutput *bool
|
||||
IncludeMetaInGinOutput *bool
|
||||
ExtendGinOutput func(err *ExErr, json map[string]any)
|
||||
ExtendGinDataOutput func(err *ExErr, depth int, json map[string]any)
|
||||
}
|
||||
|
||||
var initialized = false
|
||||
|
||||
var pkgconfig = ErrorPackageConfig{
|
||||
ZeroLogErrTraces: true,
|
||||
ZeroLogAllTraces: false,
|
||||
RecursiveErrors: true,
|
||||
ExtendedGinOutput: false,
|
||||
IncludeMetaInGinOutput: true,
|
||||
ExtendGinOutput: func(err *ExErr, json map[string]any) {},
|
||||
ExtendGinDataOutput: func(err *ExErr, depth int, json map[string]any) {},
|
||||
}
|
||||
|
||||
// Init initializes the exerr packages
|
||||
// Must be called at program start, before (!) any errors are created
|
||||
// Is not thread-safe
|
||||
func Init(cfg ErrorPackageConfigInit) {
|
||||
if initialized {
|
||||
panic("Cannot re-init error package")
|
||||
}
|
||||
|
||||
ego := func(err *ExErr, json map[string]any) {}
|
||||
egdo := func(err *ExErr, depth int, json map[string]any) {}
|
||||
|
||||
if cfg.ExtendGinOutput != nil {
|
||||
ego = cfg.ExtendGinOutput
|
||||
}
|
||||
if cfg.ExtendGinDataOutput != nil {
|
||||
egdo = cfg.ExtendGinDataOutput
|
||||
}
|
||||
|
||||
pkgconfig = ErrorPackageConfig{
|
||||
ZeroLogErrTraces: langext.Coalesce(cfg.ZeroLogErrTraces, pkgconfig.ZeroLogErrTraces),
|
||||
ZeroLogAllTraces: langext.Coalesce(cfg.ZeroLogAllTraces, pkgconfig.ZeroLogAllTraces),
|
||||
RecursiveErrors: langext.Coalesce(cfg.RecursiveErrors, pkgconfig.RecursiveErrors),
|
||||
ExtendedGinOutput: langext.Coalesce(cfg.ExtendedGinOutput, pkgconfig.ExtendedGinOutput),
|
||||
IncludeMetaInGinOutput: langext.Coalesce(cfg.IncludeMetaInGinOutput, pkgconfig.IncludeMetaInGinOutput),
|
||||
ExtendGinOutput: ego,
|
||||
ExtendGinDataOutput: egdo,
|
||||
}
|
||||
|
||||
initialized = true
|
||||
}
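
A minimal sketch of calling Init at startup; unset fields keep the defaults from pkgconfig above, and langext.Ptr is used to express the optional booleans:

```go
package main

import (
	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
	// Must happen once, before any error is created.
	exerr.Init(exerr.ErrorPackageConfigInit{
		ZeroLogErrTraces:  langext.Ptr(true),
		RecursiveErrors:   langext.Ptr(true),
		ExtendedGinOutput: langext.Ptr(false),
	})

	// ... rest of the application setup
}
```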
|
||||
|
||||
func Initialized() bool {
|
||||
return initialized
|
||||
}
|
||||
|
||||
func warnOnPkgConfigNotInitialized() {
|
||||
if !initialized {
|
||||
fmt.Printf("\n")
|
||||
fmt.Printf("%s\n", langext.StrRepeat("=", 80))
|
||||
fmt.Printf("%s\n", "[WARNING] exerr package used without initializiation")
|
||||
fmt.Printf("%s\n", " call exerr.Init() in your main() function")
|
||||
fmt.Printf("%s\n", langext.StrRepeat("=", 80))
|
||||
fmt.Printf("\n")
|
||||
}
|
||||
}
|
exerr/exerr.go
@@ -1,316 +0,0 @@
|
||||
package exerr
|
||||
|
||||
import (
|
||||
"github.com/rs/xid"
|
||||
"github.com/rs/zerolog"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"reflect"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type ExErr struct {
|
||||
UniqueID string `json:"uniqueID"`
|
||||
|
||||
Timestamp time.Time `json:"timestamp"`
|
||||
Category ErrorCategory `json:"category"`
|
||||
Severity ErrorSeverity `json:"severity"`
|
||||
Type ErrorType `json:"type"`
|
||||
|
||||
StatusCode *int `json:"statusCode"`
|
||||
|
||||
Message string `json:"message"`
|
||||
WrappedErrType string `json:"wrappedErrType"`
|
||||
WrappedErr any `json:"-"`
|
||||
Caller string `json:"caller"`
|
||||
|
||||
OriginalError *ExErr `json:"originalError"`
|
||||
|
||||
Meta MetaMap `json:"meta"`
|
||||
}
|
||||
|
||||
func (ee *ExErr) Error() string {
|
||||
return ee.RecursiveMessage()
|
||||
}
|
||||
|
||||
// Unwrap must be implemented so that the errors.XXX helpers (errors.Is, errors.As, errors.Unwrap) work
|
||||
func (ee *ExErr) Unwrap() error {
|
||||
if ee.OriginalError == nil {
|
||||
return nil // this is necessary - otherwise we return a wrapped nil and the `x == nil` comparison fails (= panic in errors.Is and other failures)
|
||||
}
|
||||
return ee.OriginalError
|
||||
}
|
||||
|
||||
// Is must be implemented so that errors.Is(err, target) works
|
||||
func (ee *ExErr) Is(e error) bool {
|
||||
return IsFrom(ee, e)
|
||||
}
|
||||
|
||||
// As must be implemented so that errors.As(err, &target) works
|
||||
//
|
||||
//goland:noinspection GoTypeAssertionOnErrors
|
||||
func (ee *ExErr) As(target any) bool {
|
||||
if dstErr, ok := target.(*ExErr); ok {
|
||||
|
||||
if dst0, ok := ee.contains(dstErr); ok {
|
||||
dstErr = dst0
|
||||
return true
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
val := reflect.ValueOf(target)
|
||||
|
||||
typStr := val.Type().Elem().String()
|
||||
|
||||
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||
if curr.Category == CatForeign && curr.WrappedErrType == typStr && curr.WrappedErr != nil {
|
||||
val.Elem().Set(reflect.ValueOf(curr.WrappedErr))
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
}
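
Because ExErr implements Unwrap/Is/As, the standard errors helpers work across the whole wrap chain. A sketch; it assumes the default RecursiveErrors behaviour and that the file really is missing:

```go
package main

import (
	"errors"
	"fmt"
	"io/fs"
	"os"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

func main() {
	_, err := os.ReadFile("/does/not/exist")

	// Wrapping keeps the foreign *fs.PathError inside the ExErr chain.
	wrapped := exerr.Wrap(err, "could not read file").Build()

	// errors.As goes through ExErr.As and recovers the original error.
	var pathErr *fs.PathError
	fmt.Println(errors.As(wrapped, &pathErr)) // true
}
```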
|
||||
|
||||
func (ee *ExErr) Log(evt *zerolog.Event) {
|
||||
evt.Msg(ee.FormatLog(LogPrintFull))
|
||||
}
|
||||
|
||||
func (ee *ExErr) FormatLog(lvl LogPrintLevel) string {
|
||||
if lvl == LogPrintShort {
|
||||
|
||||
msg := ee.Message
|
||||
if ee.OriginalError != nil && ee.OriginalError.Category == CatForeign {
|
||||
msg = msg + " (" + strings.ReplaceAll(ee.OriginalError.Message, "\n", " ") + ")"
|
||||
}
|
||||
|
||||
if ee.Type != TypeWrap {
|
||||
return "[" + ee.Type.Key + "] " + msg
|
||||
} else {
|
||||
return msg
|
||||
}
|
||||
|
||||
} else if lvl == LogPrintOverview {
|
||||
|
||||
str := "[" + ee.RecursiveType().Key + "] <" + ee.UniqueID + "> " + strings.ReplaceAll(ee.RecursiveMessage(), "\n", " ") + "\n"
|
||||
|
||||
indent := ""
|
||||
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||
indent += " "
|
||||
|
||||
str += indent
|
||||
str += "-> "
|
||||
strmsg := strings.Trim(curr.Message, " \r\n\t")
|
||||
if lbidx := strings.Index(curr.Message, "\n"); lbidx >= 0 {
|
||||
strmsg = strmsg[0:lbidx]
|
||||
}
|
||||
strmsg = langext.StrLimit(strmsg, 61, "...")
|
||||
str += strmsg
|
||||
str += "\n"
|
||||
|
||||
}
|
||||
return str
|
||||
|
||||
} else if lvl == LogPrintFull {
|
||||
|
||||
str := "[" + ee.RecursiveType().Key + "] <" + ee.UniqueID + "> " + strings.ReplaceAll(ee.RecursiveMessage(), "\n", " ") + "\n"
|
||||
|
||||
indent := ""
|
||||
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||
indent += " "
|
||||
|
||||
etype := curr.Type.Key
|
||||
if curr.Type == TypeWrap {
|
||||
etype = "~"
|
||||
}
|
||||
|
||||
str += indent
|
||||
str += "-> ["
|
||||
str += etype
|
||||
if curr.Category == CatForeign {
|
||||
str += "|Foreign"
|
||||
}
|
||||
str += "] "
|
||||
str += strings.ReplaceAll(curr.Message, "\n", " ")
|
||||
if curr.Caller != "" {
|
||||
str += " (@ "
|
||||
str += curr.Caller
|
||||
str += ")"
|
||||
}
|
||||
str += "\n"
|
||||
|
||||
if curr.Meta.Any() {
|
||||
meta := indent + " {" + curr.Meta.FormatOneLine(240) + "}"
|
||||
if len(meta) < 200 {
|
||||
str += meta
|
||||
str += "\n"
|
||||
} else {
|
||||
str += curr.Meta.FormatMultiLine(indent+" ", " ", 1024)
|
||||
str += "\n"
|
||||
}
|
||||
}
|
||||
}
|
||||
return str
|
||||
|
||||
} else {
|
||||
|
||||
return "[?[" + ee.UniqueID + "]?]"
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
func (ee *ExErr) ShortLog(evt *zerolog.Event) {
|
||||
ee.Meta.Apply(evt, langext.Ptr(240)).Msg(ee.FormatLog(LogPrintShort))
|
||||
}
|
||||
|
||||
// RecursiveMessage returns the message to show
|
||||
// = first error (top-down) that is not wrapping/foreign/empty
|
||||
// = lowest level error (that is not empty)
|
||||
// = fallback to self.message
|
||||
func (ee *ExErr) RecursiveMessage() string {
|
||||
|
||||
// ==== [1] ==== first error (top-down) that is not wrapping/foreign/empty
|
||||
|
||||
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||
if curr.Message != "" && curr.Category != CatWrap && curr.Category != CatForeign {
|
||||
return curr.Message
|
||||
}
|
||||
}
|
||||
|
||||
// ==== [2] ==== lowest level error (that is not empty)
|
||||
|
||||
deepestMsg := ""
|
||||
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||
if curr.Message != "" {
|
||||
deepestMsg = curr.Message
|
||||
}
|
||||
}
|
||||
if deepestMsg != "" {
|
||||
return deepestMsg
|
||||
}
|
||||
|
||||
// ==== [3] ==== fallback to self.message
|
||||
|
||||
return ee.Message
|
||||
}
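// Illustrative note (not part of the original file; someType stands in for any
// concrete non-wrap ErrorType): for
//
//  err := Wrap(Wrap(New(someType, "db failed").Build(), "").Build(), "").Build()
//
// RecursiveMessage returns "db failed" via rule [1] (first non-wrap, non-foreign,
// non-empty message, top-down); only when every such level is empty does rule [2]
// fall back to the deepest non-empty message.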
|
||||
|
||||
// RecursiveType returns the ErrorType to use
|
||||
// = first error (top-down) that is not wrapping/empty
|
||||
func (ee *ExErr) RecursiveType() ErrorType {
|
||||
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||
if curr.Type != TypeWrap {
|
||||
return curr.Type
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// fallback to self
|
||||
return ee.Type
|
||||
}
|
||||
|
||||
// RecursiveStatuscode returns the HTTP Statuscode to use
|
||||
// = first error (top-down) that has a statuscode set
|
||||
func (ee *ExErr) RecursiveStatuscode() *int {
|
||||
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||
if curr.StatusCode != nil {
|
||||
return langext.Ptr(*curr.StatusCode)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// RecursiveCategory returns the ErrorCategory to use
|
||||
// = first error (top-down) whose category is not CatWrap
|
||||
func (ee *ExErr) RecursiveCategory() ErrorCategory {
|
||||
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||
if curr.Category != CatWrap {
|
||||
return curr.Category
|
||||
}
|
||||
}
|
||||
|
||||
// fallback to <empty>
|
||||
return ee.Category
|
||||
}
|
||||
|
||||
// RecursiveMeta searches (top-down) for the first error that has a meta value with the specified key
|
||||
// and returns its value (or nil)
|
||||
func (ee *ExErr) RecursiveMeta(key string) *MetaValue {
|
||||
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||
if metaval, ok := curr.Meta[key]; ok {
|
||||
return langext.Ptr(metaval)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Depth returns the depth of recursively contained errors
|
||||
func (ee *ExErr) Depth() int {
|
||||
if ee.OriginalError == nil {
|
||||
return 1
|
||||
} else {
|
||||
return ee.OriginalError.Depth() + 1
|
||||
}
|
||||
}
|
||||
|
||||
// contains tests if the supplied error is contained in this error (anywhere in the chain)
|
||||
func (ee *ExErr) contains(original *ExErr) (*ExErr, bool) {
|
||||
if original == nil {
|
||||
return nil, false
|
||||
}
|
||||
|
||||
if ee == original {
|
||||
return ee, true
|
||||
}
|
||||
|
||||
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||
if curr.equalsDirectProperties(original) {
|
||||
return curr, true
|
||||
}
|
||||
}
|
||||
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// equalsDirectProperties tests if ee and other are equals, but only looks at primary properties (not `OriginalError` or `Meta`)
|
||||
func (ee *ExErr) equalsDirectProperties(other *ExErr) bool {
|
||||
|
||||
if ee.UniqueID != other.UniqueID {
|
||||
return false
|
||||
}
|
||||
if ee.Timestamp != other.Timestamp {
|
||||
return false
|
||||
}
|
||||
if ee.Category != other.Category {
|
||||
return false
|
||||
}
|
||||
if ee.Severity != other.Severity {
|
||||
return false
|
||||
}
|
||||
if ee.Type != other.Type {
|
||||
return false
|
||||
}
|
||||
if ee.StatusCode != other.StatusCode {
|
||||
return false
|
||||
}
|
||||
if ee.Message != other.Message {
|
||||
return false
|
||||
}
|
||||
if ee.WrappedErrType != other.WrappedErrType {
|
||||
return false
|
||||
}
|
||||
if ee.Caller != other.Caller {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func newID() string {
|
||||
return xid.New().String()
|
||||
}
|
@@ -1,93 +0,0 @@
|
||||
package exerr
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/tst"
|
||||
"testing"
|
||||
)
|
||||
|
||||
type golangErr struct {
|
||||
Message string
|
||||
}
|
||||
|
||||
func (g golangErr) Error() string {
|
||||
return g.Message
|
||||
}
|
||||
|
||||
type golangErr2 struct {
|
||||
Message string
|
||||
}
|
||||
|
||||
func (g golangErr2) Error() string {
|
||||
return g.Message
|
||||
}
|
||||
|
||||
type simpleError struct {
|
||||
}
|
||||
|
||||
func (g simpleError) Error() string {
|
||||
return "Something simple went wroong"
|
||||
}
|
||||
|
||||
type simpleError2 struct {
|
||||
}
|
||||
|
||||
func (g simpleError2) Error() string {
|
||||
return "Something simple went wroong"
|
||||
}
|
||||
|
||||
func TestExErrIs1(t *testing.T) {
|
||||
e0 := simpleError{}
|
||||
|
||||
wrap := Wrap(e0, "something went wrong").Str("test", "123").Build()
|
||||
|
||||
tst.AssertTrue(t, errors.Is(wrap, simpleError{}))
|
||||
tst.AssertFalse(t, errors.Is(wrap, golangErr{}))
|
||||
tst.AssertFalse(t, errors.Is(wrap, golangErr{"error1"}))
|
||||
}
|
||||
|
||||
func TestExErrIs2(t *testing.T) {
|
||||
e0 := golangErr{"error1"}
|
||||
|
||||
wrap := Wrap(e0, "something went wrong").Str("test", "123").Build()
|
||||
|
||||
tst.AssertTrue(t, errors.Is(wrap, e0))
|
||||
tst.AssertTrue(t, errors.Is(wrap, golangErr{"error1"}))
|
||||
tst.AssertFalse(t, errors.Is(wrap, golangErr{"error2"}))
|
||||
tst.AssertFalse(t, errors.Is(wrap, simpleError{}))
|
||||
}
|
||||
|
||||
func TestExErrAs(t *testing.T) {
|
||||
|
||||
e0 := golangErr{"error1"}
|
||||
|
||||
w0 := Wrap(e0, "something went wrong").Str("test", "123").Build()
|
||||
|
||||
{
|
||||
out := golangErr{}
|
||||
ok := errors.As(w0, &out)
|
||||
tst.AssertTrue(t, ok)
|
||||
tst.AssertEqual(t, out.Message, "error1")
|
||||
}
|
||||
|
||||
w1 := Wrap(w0, "outher error").Build()
|
||||
|
||||
{
|
||||
out := golangErr{}
|
||||
ok := errors.As(w1, &out)
|
||||
tst.AssertTrue(t, ok)
|
||||
tst.AssertEqual(t, out.Message, "error1")
|
||||
}
|
||||
|
||||
{
|
||||
out := golangErr2{}
|
||||
ok := errors.As(w1, &out)
|
||||
tst.AssertFalse(t, ok)
|
||||
}
|
||||
|
||||
{
|
||||
out := simpleError2{}
|
||||
ok := errors.As(w1, &out)
|
||||
tst.AssertFalse(t, ok)
|
||||
}
|
||||
}
|
112
exerr/gin.go
@@ -1,112 +0,0 @@
|
||||
package exerr
|
||||
|
||||
import (
|
||||
"github.com/gin-gonic/gin"
|
||||
json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) langext.H {
|
||||
ginJson := langext.H{}
|
||||
|
||||
if ee.UniqueID != "" {
|
||||
ginJson["id"] = ee.UniqueID
|
||||
}
|
||||
if ee.Category != CatWrap {
|
||||
ginJson["category"] = ee.Category
|
||||
}
|
||||
if ee.Type != TypeWrap {
|
||||
ginJson["type"] = ee.Type
|
||||
}
|
||||
if ee.StatusCode != nil {
|
||||
ginJson["statuscode"] = ee.StatusCode
|
||||
}
|
||||
if ee.Message != "" {
|
||||
ginJson["message"] = ee.Message
|
||||
}
|
||||
if ee.Caller != "" {
|
||||
ginJson["caller"] = ee.Caller
|
||||
}
|
||||
if ee.Severity != SevErr {
|
||||
ginJson["severity"] = ee.Severity
|
||||
}
|
||||
if ee.Timestamp != (time.Time{}) {
|
||||
ginJson["time"] = ee.Timestamp.Format(time.RFC3339)
|
||||
}
|
||||
if ee.WrappedErrType != "" {
|
||||
ginJson["wrappedErrType"] = ee.WrappedErrType
|
||||
}
|
||||
if ee.OriginalError != nil {
|
||||
ginJson["original"] = ee.OriginalError.toJson(depth+1, applyExtendListener, outputMeta)
|
||||
}
|
||||
|
||||
if outputMeta {
|
||||
metaJson := langext.H{}
|
||||
for metaKey, metaVal := range ee.Meta {
|
||||
metaJson[metaKey] = metaVal.rawValueForJson()
|
||||
}
|
||||
ginJson["meta"] = metaJson
|
||||
}
|
||||
|
||||
if applyExtendListener {
|
||||
pkgconfig.ExtendGinDataOutput(ee, depth, ginJson)
|
||||
}
|
||||
|
||||
return ginJson
|
||||
}
|
||||
|
||||
// ToAPIJson converts the ExError to a json object
|
||||
// (the same object as used in the Output(gin) method)
|
||||
//
|
||||
// Parameters:
|
||||
// - [applyExtendListener]: if false the pkgconfig.ExtendGinOutput / pkgconfig.ExtendGinDataOutput will not be applied
|
||||
// - [includeWrappedErrors]: if false we do not include the recursive/wrapped errors in `__data`
|
||||
// - [includeMetaFields]: if true we also include meta-values (aka from `.Str(key, value).Build()`), needs includeWrappedErrors=true
|
||||
func (ee *ExErr) ToAPIJson(applyExtendListener bool, includeWrappedErrors bool, includeMetaFields bool) langext.H {
|
||||
|
||||
apiOutput := langext.H{
|
||||
"errorid": ee.UniqueID,
|
||||
"message": ee.RecursiveMessage(),
|
||||
"errorcode": ee.RecursiveType().Key,
|
||||
"category": ee.RecursiveCategory().Category,
|
||||
}
|
||||
|
||||
if includeWrappedErrors {
|
||||
apiOutput["__data"] = ee.toJson(0, applyExtendListener, includeMetaFields)
|
||||
}
|
||||
|
||||
if applyExtendListener {
|
||||
pkgconfig.ExtendGinOutput(ee, apiOutput)
|
||||
}
|
||||
|
||||
return apiOutput
|
||||
}
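// Usage sketch (illustrative, not part of the original file): convert any error
// that was built/wrapped by this package into the API json shape used by Output();
// FromError is defined in this package, the builder calls are as in the tests.
//
//  ee := FromError(err)
//  obj := ee.ToAPIJson(true, true, true)
//  // obj["errorid"], obj["message"], obj["errorcode"], obj["category"], obj["__data"]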
|
||||
|
||||
func (ee *ExErr) Output(g *gin.Context) {
|
||||
|
||||
warnOnPkgConfigNotInitialized()
|
||||
|
||||
var statuscode = http.StatusInternalServerError
|
||||
|
||||
var baseCat = ee.RecursiveCategory()
|
||||
var baseType = ee.RecursiveType()
|
||||
var baseStatuscode = ee.RecursiveStatuscode()
|
||||
|
||||
if baseCat == CatUser {
|
||||
statuscode = http.StatusBadRequest
|
||||
} else if baseCat == CatSystem {
|
||||
statuscode = http.StatusInternalServerError
|
||||
}
|
||||
|
||||
if baseStatuscode != nil {
|
||||
statuscode = *baseStatuscode
|
||||
} else if baseType.DefaultStatusCode != nil {
|
||||
statuscode = *baseType.DefaultStatusCode
|
||||
}
|
||||
|
||||
ginOutput := ee.ToAPIJson(true, pkgconfig.ExtendedGinOutput, pkgconfig.IncludeMetaInGinOutput)
|
||||
|
||||
g.Render(statuscode, json.GoJsonRender{Data: ginOutput, NilSafeSlices: true, NilSafeMaps: true})
|
||||
}
|
@@ -1,88 +0,0 @@
|
||||
package exerr
|
||||
|
||||
import "fmt"
|
||||
|
||||
// IsType tests if the supplied error is of the specified ErrorType.
|
||||
func IsType(err error, errType ErrorType) bool {
|
||||
if err == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
bmerr := FromError(err)
|
||||
for bmerr != nil {
|
||||
if bmerr.Type == errType {
|
||||
return true
|
||||
}
|
||||
bmerr = bmerr.OriginalError
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// IsFrom tests if the supplied error originally stems from original
|
||||
func IsFrom(e error, original error) bool {
|
||||
if e == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
//goland:noinspection GoDirectComparisonOfErrors
|
||||
if e == original {
|
||||
return true
|
||||
}
|
||||
|
||||
bmerr := FromError(e)
|
||||
if bmerr == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
for curr := bmerr; curr != nil; curr = curr.OriginalError {
|
||||
if curr.Category == CatForeign && curr.Message == original.Error() && curr.WrappedErrType == fmt.Sprintf("%T", original) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// HasSourceMessage tests if the supplied error stems originally from an error with the message msg
|
||||
func HasSourceMessage(e error, msg string) bool {
|
||||
if e == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
bmerr := FromError(e)
|
||||
if bmerr == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
for curr := bmerr; curr != nil; curr = curr.OriginalError {
|
||||
if curr.OriginalError == nil && curr.Message == msg {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func MessageMatch(e error, matcher func(string) bool) bool {
|
||||
if e == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if matcher(e.Error()) {
|
||||
return true
|
||||
}
|
||||
|
||||
bmerr := FromError(e)
|
||||
if bmerr == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
for curr := bmerr; curr != nil; curr = curr.OriginalError {
|
||||
if matcher(curr.Message) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
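// Usage sketch (illustrative, not part of the original file): the helpers above
// walk the whole wrapped chain, so they also match errors that were wrapped more
// than once; Wrap/Build are the builder calls used in this package's tests.
//
//  inner := errors.New("record not found")
//  outer := Wrap(inner, "query failed").Build()
//  _ = IsFrom(outer, inner)                        // true
//  _ = HasSourceMessage(outer, "record not found") // true
//  _ = MessageMatch(outer, func(m string) bool { return strings.Contains(m, "query") }) // true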
|
@@ -1,37 +0,0 @@
|
||||
package exerr
|
||||
|
||||
import (
|
||||
"sync"
|
||||
)
|
||||
|
||||
type Method string
|
||||
|
||||
const (
|
||||
MethodOutput Method = "OUTPUT"
|
||||
MethodPrint Method = "PRINT"
|
||||
MethodBuild Method = "BUILD"
|
||||
MethodFatal Method = "FATAL"
|
||||
)
|
||||
|
||||
type Listener = func(method Method, v *ExErr)
|
||||
|
||||
var listenerLock = sync.Mutex{}
|
||||
var listener = make([]Listener, 0)
|
||||
|
||||
func RegisterListener(l Listener) {
|
||||
listenerLock.Lock()
|
||||
defer listenerLock.Unlock()
|
||||
|
||||
listener = append(listener, l)
|
||||
}
|
||||
|
||||
func (b *Builder) CallListener(m Method) {
|
||||
valErr := b.errorData
|
||||
|
||||
listenerLock.Lock()
|
||||
defer listenerLock.Unlock()
|
||||
|
||||
for _, v := range listener {
|
||||
v(m, valErr)
|
||||
}
|
||||
}
|
736
exerr/meta.go
@@ -1,736 +0,0 @@
|
||||
package exerr
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/rs/zerolog"
|
||||
"go.mongodb.org/mongo-driver/bson"
|
||||
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// This is a buffed up map[string]any
|
||||
// we also save type information of the map-values
|
||||
// which allows us to deserialize them back into the correct types later
|
||||
|
||||
type MetaMap map[string]MetaValue
|
||||
|
||||
type metaDataType string
|
||||
|
||||
const (
|
||||
MDTString metaDataType = "String"
|
||||
MDTStringPtr metaDataType = "StringPtr"
|
||||
MDTInt metaDataType = "Int"
|
||||
MDTInt8 metaDataType = "Int8"
|
||||
MDTInt16 metaDataType = "Int16"
|
||||
MDTInt32 metaDataType = "Int32"
|
||||
MDTInt64 metaDataType = "Int64"
|
||||
MDTFloat32 metaDataType = "Float32"
|
||||
MDTFloat64 metaDataType = "Float64"
|
||||
MDTBool metaDataType = "Bool"
|
||||
MDTBytes metaDataType = "Bytes"
|
||||
MDTObjectID metaDataType = "ObjectID"
|
||||
MDTTime metaDataType = "Time"
|
||||
MDTDuration metaDataType = "Duration"
|
||||
MDTStringArray metaDataType = "StringArr"
|
||||
MDTIntArray metaDataType = "IntArr"
|
||||
MDTInt32Array metaDataType = "Int32Arr"
|
||||
MDTID metaDataType = "ID"
|
||||
MDTAny metaDataType = "Interface"
|
||||
MDTNil metaDataType = "Nil"
|
||||
MDTEnum metaDataType = "Enum"
|
||||
)
|
||||
|
||||
type MetaValue struct {
|
||||
DataType metaDataType `json:"dataType"`
|
||||
Value interface{} `json:"value"`
|
||||
}
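// Round-trip sketch (illustrative, not part of the original file): because the
// type tag is stored next to the value, Deserialize can restore the original Go
// type from the serialized string form produced by SerializeValue.
//
//  mv := MetaValue{DataType: MDTInt, Value: 42}
//  s, _ := mv.SerializeValue()     // "42"
//  var back MetaValue
//  _ = back.Deserialize(s, MDTInt) // back.Value == int(42), back.DataType == MDTInt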
|
||||
|
||||
type metaValueSerialization struct {
|
||||
DataType metaDataType `bson:"dataType"`
|
||||
Value string `bson:"value"`
|
||||
Raw interface{} `bson:"raw"`
|
||||
}
|
||||
|
||||
func (v MetaValue) SerializeValue() (string, error) {
|
||||
switch v.DataType {
|
||||
case MDTString:
|
||||
return v.Value.(string), nil
|
||||
case MDTID:
|
||||
return v.Value.(IDWrap).Serialize(), nil
|
||||
case MDTAny:
|
||||
return v.Value.(AnyWrap).Serialize(), nil
|
||||
case MDTStringPtr:
|
||||
if langext.IsNil(v.Value) {
|
||||
return "#", nil
|
||||
}
|
||||
r := v.Value.(*string)
|
||||
if r != nil {
|
||||
return "*" + *r, nil
|
||||
} else {
|
||||
return "#", nil
|
||||
}
|
||||
case MDTInt:
|
||||
return strconv.Itoa(v.Value.(int)), nil
|
||||
case MDTInt8:
|
||||
return strconv.FormatInt(int64(v.Value.(int8)), 10), nil
|
||||
case MDTInt16:
|
||||
return strconv.FormatInt(int64(v.Value.(int16)), 10), nil
|
||||
case MDTInt32:
|
||||
return strconv.FormatInt(int64(v.Value.(int32)), 10), nil
|
||||
case MDTInt64:
|
||||
return strconv.FormatInt(v.Value.(int64), 10), nil
|
||||
case MDTFloat32:
|
||||
return strconv.FormatFloat(float64(v.Value.(float32)), 'X', -1, 32), nil
|
||||
case MDTFloat64:
|
||||
return strconv.FormatFloat(v.Value.(float64), 'X', -1, 64), nil
|
||||
case MDTBool:
|
||||
if v.Value.(bool) {
|
||||
return "true", nil
|
||||
} else {
|
||||
return "false", nil
|
||||
}
|
||||
case MDTBytes:
|
||||
return hex.EncodeToString(v.Value.([]byte)), nil
|
||||
case MDTObjectID:
|
||||
return v.Value.(primitive.ObjectID).Hex(), nil
|
||||
case MDTTime:
|
||||
return strconv.FormatInt(v.Value.(time.Time).Unix(), 10) + "|" + strconv.FormatInt(int64(v.Value.(time.Time).Nanosecond()), 10), nil
|
||||
case MDTDuration:
|
||||
return v.Value.(time.Duration).String(), nil
|
||||
case MDTStringArray:
|
||||
if langext.IsNil(v.Value) {
|
||||
return "#", nil
|
||||
}
|
||||
r, err := json.Marshal(v.Value.([]string))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return string(r), nil
|
||||
case MDTIntArray:
|
||||
if langext.IsNil(v.Value) {
|
||||
return "#", nil
|
||||
}
|
||||
r, err := json.Marshal(v.Value.([]int))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return string(r), nil
|
||||
case MDTInt32Array:
|
||||
if langext.IsNil(v.Value) {
|
||||
return "#", nil
|
||||
}
|
||||
r, err := json.Marshal(v.Value.([]int32))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return string(r), nil
|
||||
case MDTNil:
|
||||
return "", nil
|
||||
case MDTEnum:
|
||||
return v.Value.(EnumWrap).Serialize(), nil
|
||||
}
|
||||
return "", errors.New("Unknown type: " + string(v.DataType))
|
||||
}
|
||||
|
||||
func (v MetaValue) ShortString(lim int) string {
|
||||
switch v.DataType {
|
||||
case MDTString:
|
||||
r := strings.ReplaceAll(v.Value.(string), "\r", "")
|
||||
r = strings.ReplaceAll(r, "\n", "\\n")
|
||||
r = strings.ReplaceAll(r, "\t", "\\t")
|
||||
return langext.StrLimit(r, lim, "...")
|
||||
case MDTID:
|
||||
return v.Value.(IDWrap).String()
|
||||
case MDTAny:
|
||||
return v.Value.(AnyWrap).String()
|
||||
case MDTStringPtr:
|
||||
if langext.IsNil(v.Value) {
|
||||
return "<<null>>"
|
||||
}
|
||||
r := langext.CoalesceString(v.Value.(*string), "<<null>>")
|
||||
r = strings.ReplaceAll(r, "\r", "")
|
||||
r = strings.ReplaceAll(r, "\n", "\\n")
|
||||
r = strings.ReplaceAll(r, "\t", "\\t")
|
||||
return langext.StrLimit(r, lim, "...")
|
||||
case MDTInt:
|
||||
return strconv.Itoa(v.Value.(int))
|
||||
case MDTInt8:
|
||||
return strconv.FormatInt(int64(v.Value.(int8)), 10)
|
||||
case MDTInt16:
|
||||
return strconv.FormatInt(int64(v.Value.(int16)), 10)
|
||||
case MDTInt32:
|
||||
return strconv.FormatInt(int64(v.Value.(int32)), 10)
|
||||
case MDTInt64:
|
||||
return strconv.FormatInt(v.Value.(int64), 10)
|
||||
case MDTFloat32:
|
||||
return strconv.FormatFloat(float64(v.Value.(float32)), 'g', 4, 32)
|
||||
case MDTFloat64:
|
||||
return strconv.FormatFloat(v.Value.(float64), 'g', 4, 64)
|
||||
case MDTBool:
|
||||
return fmt.Sprintf("%v", v.Value.(bool))
|
||||
case MDTBytes:
|
||||
return langext.StrLimit(hex.EncodeToString(v.Value.([]byte)), lim, "...")
|
||||
case MDTObjectID:
|
||||
return v.Value.(primitive.ObjectID).Hex()
|
||||
case MDTTime:
|
||||
return v.Value.(time.Time).Format(time.RFC3339)
|
||||
case MDTDuration:
|
||||
return v.Value.(time.Duration).String()
|
||||
case MDTStringArray:
|
||||
if langext.IsNil(v.Value) {
|
||||
return "<<null>>"
|
||||
}
|
||||
r, err := json.Marshal(v.Value.([]string))
|
||||
if err != nil {
|
||||
return "(err)"
|
||||
}
|
||||
return langext.StrLimit(string(r), lim, "...")
|
||||
case MDTIntArray:
|
||||
if langext.IsNil(v.Value) {
|
||||
return "<<null>>"
|
||||
}
|
||||
r, err := json.Marshal(v.Value.([]int))
|
||||
if err != nil {
|
||||
return "(err)"
|
||||
}
|
||||
return langext.StrLimit(string(r), lim, "...")
|
||||
case MDTInt32Array:
|
||||
if langext.IsNil(v.Value) {
|
||||
return "<<null>>"
|
||||
}
|
||||
r, err := json.Marshal(v.Value.([]int32))
|
||||
if err != nil {
|
||||
return "(err)"
|
||||
}
|
||||
return langext.StrLimit(string(r), lim, "...")
|
||||
case MDTNil:
|
||||
return "<<null>>"
|
||||
case MDTEnum:
|
||||
return v.Value.(EnumWrap).String()
|
||||
}
|
||||
return "(err)"
|
||||
}
|
||||
|
||||
func (v MetaValue) Apply(key string, evt *zerolog.Event, limitLen *int) *zerolog.Event {
|
||||
switch v.DataType {
|
||||
case MDTString:
|
||||
if limitLen == nil {
|
||||
return evt.Str(key, v.Value.(string))
|
||||
} else {
|
||||
return evt.Str(key, langext.StrLimit(v.Value.(string), *limitLen, "..."))
|
||||
}
|
||||
case MDTID:
|
||||
return evt.Str(key, v.Value.(IDWrap).Value)
|
||||
case MDTAny:
|
||||
if v.Value.(AnyWrap).IsError {
|
||||
return evt.Str(key, "(err)")
|
||||
} else {
|
||||
if limitLen == nil {
|
||||
return evt.Str(key, v.Value.(AnyWrap).Json)
|
||||
} else {
|
||||
return evt.Str(key, langext.StrLimit(v.Value.(AnyWrap).Json, *limitLen, "..."))
|
||||
}
|
||||
}
|
||||
case MDTStringPtr:
|
||||
if langext.IsNil(v.Value) {
|
||||
return evt.Str(key, "<<null>>")
|
||||
}
|
||||
if limitLen == nil {
|
||||
return evt.Str(key, langext.CoalesceString(v.Value.(*string), "<<null>>"))
|
||||
} else {
|
||||
return evt.Str(key, langext.StrLimit(langext.CoalesceString(v.Value.(*string), "<<null>>"), *limitLen, "..."))
|
||||
}
|
||||
case MDTInt:
|
||||
return evt.Int(key, v.Value.(int))
|
||||
case MDTInt8:
|
||||
return evt.Int8(key, v.Value.(int8))
|
||||
case MDTInt16:
|
||||
return evt.Int16(key, v.Value.(int16))
|
||||
case MDTInt32:
|
||||
return evt.Int32(key, v.Value.(int32))
|
||||
case MDTInt64:
|
||||
return evt.Int64(key, v.Value.(int64))
|
||||
case MDTFloat32:
|
||||
return evt.Float32(key, v.Value.(float32))
|
||||
case MDTFloat64:
|
||||
return evt.Float64(key, v.Value.(float64))
|
||||
case MDTBool:
|
||||
return evt.Bool(key, v.Value.(bool))
|
||||
case MDTBytes:
|
||||
return evt.Bytes(key, v.Value.([]byte))
|
||||
case MDTObjectID:
|
||||
return evt.Str(key, v.Value.(primitive.ObjectID).Hex())
|
||||
case MDTTime:
|
||||
return evt.Time(key, v.Value.(time.Time))
|
||||
case MDTDuration:
|
||||
return evt.Dur(key, v.Value.(time.Duration))
|
||||
case MDTStringArray:
|
||||
if langext.IsNil(v.Value) {
|
||||
return evt.Strs(key, nil)
|
||||
}
|
||||
return evt.Strs(key, v.Value.([]string))
|
||||
case MDTIntArray:
|
||||
if langext.IsNil(v.Value) {
|
||||
return evt.Ints(key, nil)
|
||||
}
|
||||
return evt.Ints(key, v.Value.([]int))
|
||||
case MDTInt32Array:
|
||||
if langext.IsNil(v.Value) {
|
||||
return evt.Ints32(key, nil)
|
||||
}
|
||||
return evt.Ints32(key, v.Value.([]int32))
|
||||
case MDTNil:
|
||||
return evt.Str(key, "<<null>>")
|
||||
case MDTEnum:
|
||||
if v.Value.(EnumWrap).IsNil {
|
||||
return evt.Any(key, nil)
|
||||
} else if v.Value.(EnumWrap).ValueRaw != nil {
|
||||
return evt.Any(key, v.Value.(EnumWrap).ValueRaw)
|
||||
} else {
|
||||
return evt.Str(key, v.Value.(EnumWrap).ValueString)
|
||||
}
|
||||
}
|
||||
return evt.Str(key, "(err)")
|
||||
}
|
||||
|
||||
func (v MetaValue) MarshalJSON() ([]byte, error) {
|
||||
str, err := v.SerializeValue()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return json.Marshal(string(v.DataType) + ":" + str)
|
||||
}
|
||||
|
||||
func (v *MetaValue) UnmarshalJSON(data []byte) error {
|
||||
var str = ""
|
||||
err := json.Unmarshal(data, &str)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
split := strings.SplitN(str, ":", 2)
|
||||
if len(split) != 2 {
|
||||
return errors.New("failed to decode MetaValue: '" + str + "'")
|
||||
}
|
||||
|
||||
return v.Deserialize(split[1], metaDataType(split[0]))
|
||||
}
|
||||
|
||||
func (v MetaValue) MarshalBSON() ([]byte, error) {
|
||||
serval, err := v.SerializeValue()
|
||||
if err != nil {
|
||||
return nil, Wrap(err, "failed to bson-marshal MetaValue (serialize)").Build()
|
||||
}
|
||||
|
||||
// this is a kinda ugly hack - but serialization to mongodb and back can lose the correct type information...
|
||||
bin, err := bson.Marshal(metaValueSerialization{
|
||||
DataType: v.DataType,
|
||||
Value: serval,
|
||||
Raw: v.Value,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, Wrap(err, "failed to bson-marshal MetaValue (marshal)").Build()
|
||||
}
|
||||
|
||||
return bin, nil
|
||||
}
|
||||
|
||||
func (v *MetaValue) UnmarshalBSON(bytes []byte) error {
|
||||
var serval metaValueSerialization
|
||||
err := bson.Unmarshal(bytes, &serval)
|
||||
if err != nil {
|
||||
return Wrap(err, "failed to bson-unmarshal MetaValue (unmarshal)").Build()
|
||||
}
|
||||
|
||||
err = v.Deserialize(serval.Value, serval.DataType)
|
||||
if err != nil {
|
||||
return Wrap(err, "failed to deserialize MetaValue from bson").Str("raw", serval.Value).Build()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (v *MetaValue) Deserialize(value string, datatype metaDataType) error {
|
||||
switch datatype {
|
||||
case MDTString:
|
||||
v.Value = value
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTID:
|
||||
v.Value = deserializeIDWrap(value)
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTAny:
|
||||
v.Value = deserializeAnyWrap(value)
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTStringPtr:
|
||||
if len(value) <= 0 || (value[0] != '*' && value[0] != '#') {
|
||||
return errors.New("Invalid StringPtr: " + value)
|
||||
} else if value == "#" {
|
||||
v.Value = nil
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
} else {
|
||||
v.Value = langext.Ptr(value[1:])
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
}
|
||||
case MDTInt:
|
||||
pv, err := strconv.ParseInt(value, 10, 0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Value = int(pv)
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTInt8:
|
||||
pv, err := strconv.ParseInt(value, 10, 8)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Value = int8(pv)
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTInt16:
|
||||
pv, err := strconv.ParseInt(value, 10, 16)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Value = int16(pv)
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTInt32:
|
||||
pv, err := strconv.ParseInt(value, 10, 32)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Value = int32(pv)
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTInt64:
|
||||
pv, err := strconv.ParseInt(value, 10, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Value = pv
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTFloat32:
|
||||
pv, err := strconv.ParseFloat(value, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Value = float32(pv)
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTFloat64:
|
||||
pv, err := strconv.ParseFloat(value, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Value = pv
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTBool:
|
||||
if value == "true" {
|
||||
v.Value = true
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
}
|
||||
if value == "false" {
|
||||
v.Value = false
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
}
|
||||
return errors.New("invalid bool value: " + value)
|
||||
case MDTBytes:
|
||||
r, err := hex.DecodeString(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Value = r
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTObjectID:
|
||||
r, err := primitive.ObjectIDFromHex(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Value = r
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTTime:
|
||||
ps := strings.Split(value, "|")
|
||||
if len(ps) != 2 {
|
||||
return errors.New("invalid time.time: " + value)
|
||||
}
|
||||
p1, err := strconv.ParseInt(ps[0], 10, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
p2, err := strconv.ParseInt(ps[1], 10, 32)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Value = time.Unix(p1, p2)
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTDuration:
|
||||
r, err := time.ParseDuration(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Value = r
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTStringArray:
|
||||
if value == "#" {
|
||||
v.Value = nil
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
}
|
||||
pj := make([]string, 0)
|
||||
err := json.Unmarshal([]byte(value), &pj)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Value = pj
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTIntArray:
|
||||
if value == "#" {
|
||||
v.Value = nil
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
}
|
||||
pj := make([]int, 0)
|
||||
err := json.Unmarshal([]byte(value), &pj)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Value = pj
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTInt32Array:
|
||||
if value == "#" {
|
||||
v.Value = nil
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
}
|
||||
pj := make([]int32, 0)
|
||||
err := json.Unmarshal([]byte(value), &pj)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Value = pj
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTNil:
|
||||
v.Value = nil
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
case MDTEnum:
|
||||
v.Value = deserializeEnumWrap(value)
|
||||
v.DataType = datatype
|
||||
return nil
|
||||
}
|
||||
return errors.New("Unknown type: " + string(datatype))
|
||||
}
|
||||
|
||||
func (v MetaValue) ValueString() string {
|
||||
switch v.DataType {
|
||||
case MDTString:
|
||||
return v.Value.(string)
|
||||
case MDTID:
|
||||
return v.Value.(IDWrap).String()
|
||||
case MDTAny:
|
||||
return v.Value.(AnyWrap).String()
|
||||
case MDTStringPtr:
|
||||
if langext.IsNil(v.Value) {
|
||||
return "<<null>>"
|
||||
}
|
||||
return langext.CoalesceString(v.Value.(*string), "<<null>>")
|
||||
case MDTInt:
|
||||
return strconv.Itoa(v.Value.(int))
|
||||
case MDTInt8:
|
||||
return strconv.FormatInt(int64(v.Value.(int8)), 10)
|
||||
case MDTInt16:
|
||||
return strconv.FormatInt(int64(v.Value.(int16)), 10)
|
||||
case MDTInt32:
|
||||
return strconv.FormatInt(int64(v.Value.(int32)), 10)
|
||||
case MDTInt64:
|
||||
return strconv.FormatInt(v.Value.(int64), 10)
|
||||
case MDTFloat32:
|
||||
return strconv.FormatFloat(float64(v.Value.(float32)), 'g', 4, 32)
|
||||
case MDTFloat64:
|
||||
return strconv.FormatFloat(v.Value.(float64), 'g', 4, 64)
|
||||
case MDTBool:
|
||||
return fmt.Sprintf("%v", v.Value.(bool))
|
||||
case MDTBytes:
|
||||
return hex.EncodeToString(v.Value.([]byte))
|
||||
case MDTObjectID:
|
||||
return v.Value.(primitive.ObjectID).Hex()
|
||||
case MDTTime:
|
||||
return v.Value.(time.Time).Format(time.RFC3339Nano)
|
||||
case MDTDuration:
|
||||
return v.Value.(time.Duration).String()
|
||||
case MDTStringArray:
|
||||
if langext.IsNil(v.Value) {
|
||||
return "<<null>>"
|
||||
}
|
||||
r, err := json.MarshalIndent(v.Value.([]string), "", " ")
|
||||
if err != nil {
|
||||
return "(err)"
|
||||
}
|
||||
return string(r)
|
||||
case MDTIntArray:
|
||||
if langext.IsNil(v.Value) {
|
||||
return "<<null>>"
|
||||
}
|
||||
r, err := json.MarshalIndent(v.Value.([]int), "", " ")
|
||||
if err != nil {
|
||||
return "(err)"
|
||||
}
|
||||
return string(r)
|
||||
case MDTInt32Array:
|
||||
if langext.IsNil(v.Value) {
|
||||
return "<<null>>"
|
||||
}
|
||||
r, err := json.MarshalIndent(v.Value.([]int32), "", " ")
|
||||
if err != nil {
|
||||
return "(err)"
|
||||
}
|
||||
return string(r)
|
||||
case MDTNil:
|
||||
return "<<null>>"
|
||||
case MDTEnum:
|
||||
return v.Value.(EnumWrap).String()
|
||||
}
|
||||
return "(err)"
|
||||
}
|
||||
|
||||
// rawValueForJson returns the `Value` field most of the time,
|
||||
// but for some datatypes we do special processing
|
||||
// so that we can plug the output value into json.Marshal without any surprises
|
||||
func (v MetaValue) rawValueForJson() any {
|
||||
if v.DataType == MDTAny {
|
||||
if v.Value.(AnyWrap).IsNil {
|
||||
return nil
|
||||
}
|
||||
if v.Value.(AnyWrap).IsError {
|
||||
return bson.M{"@error": true}
|
||||
}
|
||||
jsonobj := primitive.M{}
|
||||
jsonarr := primitive.A{}
|
||||
if err := json.Unmarshal([]byte(v.Value.(AnyWrap).Json), &jsonobj); err == nil {
|
||||
return jsonobj
|
||||
} else if err := json.Unmarshal([]byte(v.Value.(AnyWrap).Json), &jsonarr); err == nil {
|
||||
return jsonarr
|
||||
} else {
|
||||
return bson.M{"type": v.Value.(AnyWrap).Type, "data": v.Value.(AnyWrap).Json}
|
||||
}
|
||||
}
|
||||
if v.DataType == MDTID {
|
||||
if v.Value.(IDWrap).IsNil {
|
||||
return nil
|
||||
}
|
||||
return v.Value.(IDWrap).Value
|
||||
}
|
||||
if v.DataType == MDTBytes {
|
||||
return hex.EncodeToString(v.Value.([]byte))
|
||||
}
|
||||
if v.DataType == MDTDuration {
|
||||
return v.Value.(time.Duration).String()
|
||||
}
|
||||
if v.DataType == MDTTime {
|
||||
return v.Value.(time.Time).Format(time.RFC3339Nano)
|
||||
}
|
||||
if v.DataType == MDTObjectID {
|
||||
return v.Value.(primitive.ObjectID).Hex()
|
||||
}
|
||||
if v.DataType == MDTNil {
|
||||
return nil
|
||||
}
|
||||
if v.DataType == MDTEnum {
|
||||
if v.Value.(EnumWrap).IsNil {
|
||||
return nil
|
||||
}
|
||||
if v.Value.(EnumWrap).ValueRaw != nil {
|
||||
return v.Value.(EnumWrap).ValueRaw
|
||||
}
|
||||
return v.Value.(EnumWrap).ValueString
|
||||
}
|
||||
return v.Value
|
||||
}
|
||||
|
||||
func (mm MetaMap) FormatOneLine(singleMaxLen int) string {
|
||||
r := ""
|
||||
|
||||
i := 0
|
||||
for key, val := range mm {
|
||||
if i > 0 {
|
||||
r += ", "
|
||||
}
|
||||
|
||||
r += "\"" + key + "\""
|
||||
r += ": "
|
||||
r += "\"" + val.ShortString(singleMaxLen) + "\""
|
||||
|
||||
i++
|
||||
}
|
||||
|
||||
return r
|
||||
}
|
||||
|
||||
func (mm MetaMap) FormatMultiLine(indentFront string, indentKeys string, maxLenValue int) string {
|
||||
r := ""
|
||||
|
||||
r += indentFront + "{" + "\n"
|
||||
for key, val := range mm {
|
||||
if key == "gin.body" {
|
||||
continue
|
||||
}
|
||||
|
||||
r += indentFront
|
||||
r += indentKeys
|
||||
r += "\"" + key + "\""
|
||||
r += ": "
|
||||
r += "\"" + val.ShortString(maxLenValue) + "\""
|
||||
r += ",\n"
|
||||
}
|
||||
r += indentFront + "}"
|
||||
|
||||
return r
|
||||
}
|
||||
|
||||
func (mm MetaMap) Any() bool {
|
||||
return len(mm) > 0
|
||||
}
|
||||
|
||||
func (mm MetaMap) Apply(evt *zerolog.Event, limitLen *int) *zerolog.Event {
|
||||
for key, val := range mm {
|
||||
evt = val.Apply(key, evt, limitLen)
|
||||
}
|
||||
return evt
|
||||
}
|
||||
|
||||
func (mm MetaMap) add(key string, mdtype metaDataType, val interface{}) {
|
||||
if _, ok := mm[key]; !ok {
|
||||
mm[key] = MetaValue{DataType: mdtype, Value: val}
|
||||
return
|
||||
}
|
||||
for i := 2; ; i++ {
|
||||
realkey := key + "-" + strconv.Itoa(i)
|
||||
if _, ok := mm[realkey]; !ok {
|
||||
mm[realkey] = MetaValue{DataType: mdtype, Value: val}
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,14 +0,0 @@
|
||||
package exerr
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"runtime"
|
||||
)
|
||||
|
||||
func callername(skip int) string {
|
||||
pc := make([]uintptr, 15)
|
||||
n := runtime.Callers(skip+2, pc)
|
||||
frames := runtime.CallersFrames(pc[:n])
|
||||
frame, _ := frames.Next()
|
||||
return fmt.Sprintf("%s:%d %s", frame.File, frame.Line, frame.Function)
|
||||
}
|
@@ -1,189 +0,0 @@
|
||||
package exerr
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/rs/zerolog/log"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/enums"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"strings"
|
||||
)
|
||||
|
||||
//
|
||||
// These are wrapper objects, because for some metadata-types we need to serialize a bit more complex data
|
||||
// (e.g. the actual type for ID objects, or the json representation for any-typed values)
|
||||
//
|
||||
|
||||
type IDWrap struct {
|
||||
Type string
|
||||
Value string
|
||||
IsNil bool
|
||||
}
|
||||
|
||||
func newIDWrap(val fmt.Stringer) IDWrap {
|
||||
t := fmt.Sprintf("%T", val)
|
||||
arr := strings.Split(t, ".")
|
||||
if len(arr) > 0 {
|
||||
t = arr[len(arr)-1]
|
||||
}
|
||||
|
||||
if langext.IsNil(val) {
|
||||
return IDWrap{Type: t, Value: "", IsNil: true}
|
||||
}
|
||||
|
||||
v := val.String()
|
||||
return IDWrap{Type: t, Value: v, IsNil: false}
|
||||
}
|
||||
|
||||
func (w IDWrap) Serialize() string {
|
||||
if w.IsNil {
|
||||
return "!nil" + ":" + w.Type
|
||||
}
|
||||
return w.Type + ":" + w.Value
|
||||
}
|
||||
|
||||
func (w IDWrap) String() string {
|
||||
if w.IsNil {
|
||||
return w.Type + "<<nil>>"
|
||||
}
|
||||
return w.Type + "(" + w.Value + ")"
|
||||
}
|
||||
|
||||
func deserializeIDWrap(v string) IDWrap {
|
||||
r := strings.SplitN(v, ":", 2)
|
||||
|
||||
if len(r) == 2 && r[0] == "!nil" {
|
||||
return IDWrap{Type: r[1], Value: v, IsNil: true}
|
||||
}
|
||||
|
||||
if len(r) == 0 {
|
||||
return IDWrap{}
|
||||
} else if len(r) == 1 {
|
||||
return IDWrap{Type: "", Value: v, IsNil: false}
|
||||
} else {
|
||||
return IDWrap{Type: r[0], Value: r[1], IsNil: false}
|
||||
}
|
||||
}
|
||||
|
||||
type AnyWrap struct {
|
||||
Type string
|
||||
Json string
|
||||
IsError bool
|
||||
IsNil bool
|
||||
}
|
||||
|
||||
func newAnyWrap(val any) (result AnyWrap) {
|
||||
result = AnyWrap{Type: "", Json: "", IsError: true, IsNil: false} // ensure a return in case of recover()
|
||||
|
||||
defer func() {
|
||||
if err := recover(); err != nil {
|
||||
// sending the error should never crash our program
|
||||
log.Error().Interface("err", err).Msg("Panic while trying to marshal anywrap ( bmerror.Interface )")
|
||||
}
|
||||
}()
|
||||
|
||||
t := fmt.Sprintf("%T", val)
|
||||
|
||||
if langext.IsNil(val) {
|
||||
return AnyWrap{Type: t, Json: "", IsError: false, IsNil: true}
|
||||
}
|
||||
|
||||
j, err := json.Marshal(val)
|
||||
if err == nil {
|
||||
return AnyWrap{Type: t, Json: string(j), IsError: false, IsNil: false}
|
||||
} else {
|
||||
return AnyWrap{Type: t, Json: "", IsError: true, IsNil: false}
|
||||
}
|
||||
}
|
||||
|
||||
func (w AnyWrap) Serialize() string {
|
||||
if w.IsError {
|
||||
return "ERR" + ":" + w.Type + ":" + w.Json
|
||||
} else if w.IsNil {
|
||||
return "NIL" + ":" + w.Type + ":" + w.Json
|
||||
} else {
|
||||
return "OK" + ":" + w.Type + ":" + w.Json
|
||||
}
|
||||
}
|
||||
|
||||
func (w AnyWrap) String() string {
|
||||
if w.IsError {
|
||||
return "(error)"
|
||||
} else if w.IsNil {
|
||||
return "(nil)"
|
||||
} else {
|
||||
return w.Json
|
||||
}
|
||||
}
|
||||
|
||||
func deserializeAnyWrap(v string) AnyWrap {
|
||||
r := strings.SplitN(v, ":", 3)
|
||||
if len(r) != 3 {
|
||||
return AnyWrap{IsError: true, Type: "", Json: "", IsNil: false}
|
||||
} else {
|
||||
if r[0] == "OK" {
|
||||
return AnyWrap{IsError: false, Type: r[1], Json: r[2], IsNil: false}
|
||||
} else if r[0] == "ERR" {
|
||||
return AnyWrap{IsError: true, Type: r[1], Json: r[2], IsNil: false}
|
||||
} else if r[0] == "NIL" {
|
||||
return AnyWrap{IsError: false, Type: r[1], Json: "", IsNil: true}
|
||||
} else {
|
||||
return AnyWrap{IsError: true, Type: "", Json: "", IsNil: false}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type EnumWrap struct {
|
||||
Type string
|
||||
ValueString string
|
||||
ValueRaw enums.Enum // `ValueRaw` is lost during serialization roundtrip
|
||||
IsNil bool
|
||||
}
|
||||
|
||||
func newEnumWrap(val enums.Enum) EnumWrap {
|
||||
t := fmt.Sprintf("%T", val)
|
||||
arr := strings.Split(t, ".")
|
||||
if len(arr) > 0 {
|
||||
t = arr[len(arr)-1]
|
||||
}
|
||||
|
||||
if langext.IsNil(val) {
|
||||
return EnumWrap{Type: t, ValueString: "", ValueRaw: val, IsNil: true}
|
||||
}
|
||||
|
||||
if enumstr, ok := val.(enums.StringEnum); ok {
|
||||
return EnumWrap{Type: t, ValueString: enumstr.String(), ValueRaw: val, IsNil: false}
|
||||
}
|
||||
|
||||
return EnumWrap{Type: t, ValueString: fmt.Sprintf("%v", val), ValueRaw: val, IsNil: false}
|
||||
}
|
||||
|
||||
func (w EnumWrap) Serialize() string {
|
||||
if w.IsNil {
|
||||
return "!nil" + ":" + w.Type
|
||||
}
|
||||
return w.Type + ":" + w.ValueString
|
||||
}
|
||||
|
||||
func (w EnumWrap) String() string {
|
||||
if w.IsNil {
|
||||
return w.Type + "<<nil>>"
|
||||
}
|
||||
return "[" + w.Type + "] " + w.ValueString
|
||||
}
|
||||
|
||||
func deserializeEnumWrap(v string) EnumWrap {
|
||||
r := strings.SplitN(v, ":", 2)
|
||||
|
||||
if len(r) == 2 && r[0] == "!nil" {
|
||||
return EnumWrap{Type: r[1], ValueString: v, ValueRaw: nil, IsNil: true}
|
||||
}
|
||||
|
||||
if len(r) == 0 {
|
||||
return EnumWrap{}
|
||||
} else if len(r) == 1 {
|
||||
return EnumWrap{Type: "", ValueString: v, ValueRaw: nil, IsNil: false}
|
||||
} else {
|
||||
return EnumWrap{Type: r[0], ValueString: r[1], ValueRaw: nil, IsNil: false}
|
||||
}
|
||||
}
|
@@ -1,36 +0,0 @@
|
||||
package fsext
|
||||
|
||||
import "os"
|
||||
|
||||
func PathExists(fp string) (bool, error) {
|
||||
_, err := os.Stat(fp)
|
||||
if err == nil {
|
||||
return true, nil
|
||||
}
|
||||
if os.IsNotExist(err) {
|
||||
return false, nil
|
||||
}
|
||||
return false, err
|
||||
}
|
||||
|
||||
func FileExists(fp string) (bool, error) {
|
||||
stat, err := os.Stat(fp)
|
||||
if err == nil {
|
||||
return !stat.IsDir(), nil
|
||||
}
|
||||
if os.IsNotExist(err) {
|
||||
return false, nil
|
||||
}
|
||||
return false, err
|
||||
}
|
||||
|
||||
func DirectoryExists(fp string) (bool, error) {
|
||||
stat, err := os.Stat(fp)
|
||||
if err == nil {
|
||||
return stat.IsDir(), nil
|
||||
}
|
||||
if os.IsNotExist(err) {
|
||||
return false, nil
|
||||
}
|
||||
return false, err
|
||||
}
|
@@ -1,59 +0,0 @@
|
||||
package ginext
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/gin-gonic/gin"
|
||||
"time"
|
||||
)
|
||||
|
||||
type AppContext struct {
|
||||
inner context.Context
|
||||
cancelFunc context.CancelFunc
|
||||
cancelled bool
|
||||
GinContext *gin.Context
|
||||
}
|
||||
|
||||
func CreateAppContext(g *gin.Context, innerCtx context.Context, cancelFn context.CancelFunc) *AppContext {
|
||||
for key, value := range g.Keys {
|
||||
innerCtx = context.WithValue(innerCtx, key, value)
|
||||
}
|
||||
return &AppContext{
|
||||
inner: innerCtx,
|
||||
cancelFunc: cancelFn,
|
||||
cancelled: false,
|
||||
GinContext: g,
|
||||
}
|
||||
}
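// Note (illustrative, not part of the original file): AppContext implements
// context.Context via Deadline/Done/Err/Value below, so it can be handed to any
// API that expects a context; values set on the gin.Context via c.Set(...) are
// copied into the inner context above and stay readable through Value(...).
//
//  func loadUser(ctx context.Context) any { return ctx.Value("userID") }
//  // loadUser(CreateAppContext(g, innerCtx, cancel))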
|
||||
|
||||
func (ac *AppContext) Deadline() (deadline time.Time, ok bool) {
|
||||
return ac.inner.Deadline()
|
||||
}
|
||||
|
||||
func (ac *AppContext) Done() <-chan struct{} {
|
||||
return ac.inner.Done()
|
||||
}
|
||||
|
||||
func (ac *AppContext) Err() error {
|
||||
return ac.inner.Err()
|
||||
}
|
||||
|
||||
func (ac *AppContext) Value(key any) any {
|
||||
return ac.inner.Value(key)
|
||||
}
|
||||
|
||||
func (ac *AppContext) Set(key, value any) {
|
||||
ac.inner = context.WithValue(ac.inner, key, value)
|
||||
}
|
||||
|
||||
func (ac *AppContext) Cancel() {
|
||||
ac.cancelled = true
|
||||
ac.cancelFunc()
|
||||
}
|
||||
|
||||
func (ac *AppContext) RequestURI() string {
|
||||
if ac.GinContext != nil && ac.GinContext.Request != nil {
|
||||
return ac.GinContext.Request.Method + " :: " + ac.GinContext.Request.RequestURI
|
||||
} else {
|
||||
return ""
|
||||
}
|
||||
}
|
@@ -1,23 +0,0 @@
|
||||
package ginext
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
)
|
||||
|
||||
func RedirectFound(newuri string) WHandlerFunc {
|
||||
return func(pctx PreContext) HTTPResponse {
|
||||
return Redirect(http.StatusFound, newuri)
|
||||
}
|
||||
}
|
||||
|
||||
func RedirectTemporary(newuri string) WHandlerFunc {
|
||||
return func(pctx PreContext) HTTPResponse {
|
||||
return Redirect(http.StatusTemporaryRedirect, newuri)
|
||||
}
|
||||
}
|
||||
|
||||
func RedirectPermanent(newuri string) WHandlerFunc {
|
||||
return func(pctx PreContext) HTTPResponse {
|
||||
return Redirect(http.StatusPermanentRedirect, newuri)
|
||||
}
|
||||
}
|
@@ -1,12 +0,0 @@
|
||||
package ginext
|
||||
|
||||
import (
|
||||
"github.com/gin-gonic/gin"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
|
||||
)
|
||||
|
||||
func BodyBuffer(g *gin.Context) {
|
||||
if g.Request.Body != nil {
|
||||
g.Request.Body = dataext.NewBufferedReadCloser(g.Request.Body)
|
||||
}
|
||||
}
|
@@ -1,21 +0,0 @@
|
||||
package ginext
|
||||
|
||||
import (
|
||||
"github.com/gin-gonic/gin"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
func CorsMiddleware() gin.HandlerFunc {
|
||||
return func(c *gin.Context) {
|
||||
c.Writer.Header().Set("Access-Control-Allow-Origin", "*")
|
||||
c.Writer.Header().Set("Access-Control-Allow-Credentials", "true")
|
||||
c.Writer.Header().Set("Access-Control-Allow-Headers", "Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization, accept, origin, Cache-Control, X-Requested-With")
|
||||
c.Writer.Header().Set("Access-Control-Allow-Methods", "OPTIONS, GET, POST, PUT, PATCH, DELETE, COUNT")
|
||||
|
||||
if c.Request.Method == "OPTIONS" {
|
||||
c.AbortWithStatus(http.StatusOK)
|
||||
} else {
|
||||
c.Next()
|
||||
}
|
||||
}
|
||||
}
|
176
ginext/engine.go
@@ -1,176 +0,0 @@
|
||||
package ginext
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/rs/zerolog/log"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/mathext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/rext"
|
||||
"net"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type GinWrapper struct {
|
||||
engine *gin.Engine
|
||||
SuppressGinLogs bool
|
||||
|
||||
allowCors bool
|
||||
ginDebug bool
|
||||
bufferBody bool
|
||||
requestTimeout time.Duration
|
||||
|
||||
routeSpecs []ginRouteSpec
|
||||
}
|
||||
|
||||
type ginRouteSpec struct {
|
||||
Method string
|
||||
URL string
|
||||
Middlewares []string
|
||||
Handler string
|
||||
}
|
||||
|
||||
// NewEngine creates a new (wrapped) ginEngine
|
||||
// Parameters are:
|
||||
// - [allowCors] Add cors handler to allow all CORS requests on the default http methods
|
||||
// - [ginDebug] Set gin.debug to true (adds more logs)
|
||||
// - [bufferBody] Buffers the input body stream so that the ginext error handler can later include the whole request body
|
||||
// - [timeout] The default handler timeout
|
||||
func NewEngine(allowCors bool, ginDebug bool, bufferBody bool, timeout time.Duration) *GinWrapper {
|
||||
engine := gin.New()
|
||||
|
||||
wrapper := &GinWrapper{
|
||||
engine: engine,
|
||||
SuppressGinLogs: false,
|
||||
allowCors: allowCors,
|
||||
ginDebug: ginDebug,
|
||||
bufferBody: bufferBody,
|
||||
requestTimeout: timeout,
|
||||
}
|
||||
|
||||
engine.RedirectFixedPath = false
|
||||
engine.RedirectTrailingSlash = false
|
||||
|
||||
if allowCors {
|
||||
engine.Use(CorsMiddleware())
|
||||
}
|
||||
|
||||
// do not debug-print routes
|
||||
gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {}
|
||||
|
||||
if !ginDebug {
|
||||
gin.SetMode(gin.ReleaseMode)
|
||||
|
||||
ginlogger := gin.Logger()
|
||||
engine.Use(func(context *gin.Context) {
|
||||
if !wrapper.SuppressGinLogs {
|
||||
ginlogger(context)
|
||||
}
|
||||
})
|
||||
} else {
|
||||
gin.SetMode(gin.DebugMode)
|
||||
}
|
||||
|
||||
return wrapper
|
||||
}
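// Usage sketch (illustrative, not part of the original file): typical startup
// with this wrapper; how routes are registered on the wrapper is not shown in
// this file and is therefore left out here.
//
//  ginw := NewEngine(true, false, true, 30*time.Second)
//  // ... register routes on ginw ...
//  errChan, _ := ginw.ListenAndServeHTTP(":8080", func(port string) {
//      log.Info().Msg("listening on port " + port)
//  })
//  if err := <-errChan; err != nil {
//      log.Fatal().Err(err).Msg("http server stopped")
//  }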
|
||||
|
||||
func (w *GinWrapper) ListenAndServeHTTP(addr string, postInit func(port string)) (chan error, *http.Server) {
|
||||
|
||||
w.DebugPrintRoutes()
|
||||
|
||||
httpserver := &http.Server{
|
||||
Addr: addr,
|
||||
Handler: w.engine,
|
||||
}
|
||||
|
||||
errChan := make(chan error)
|
||||
|
||||
go func() {
|
||||
|
||||
ln, err := net.Listen("tcp", httpserver.Addr)
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
return
|
||||
}
|
||||
|
||||
_, port, err := net.SplitHostPort(ln.Addr().String())
|
||||
if err != nil {
|
||||
errChan <- err
|
||||
return
|
||||
}
|
||||
|
||||
log.Info().Str("address", httpserver.Addr).Msg("HTTP-Server started on http://localhost:" + port)
|
||||
|
||||
if postInit != nil {
|
||||
postInit(port) // the net.Listener a few lines above is at this point actually already buffering requests
|
||||
}
|
||||
|
||||
errChan <- httpserver.Serve(ln)
|
||||
}()
|
||||
|
||||
return errChan, httpserver
|
||||
}
|
||||
|
||||
func (w *GinWrapper) DebugPrintRoutes() {
|
||||
if !w.ginDebug {
|
||||
return
|
||||
}
|
||||
|
||||
lines := make([][4]string, 0)
|
||||
|
||||
pad := [4]int{0, 0, 0, 0}
|
||||
|
||||
for _, spec := range w.routeSpecs {
|
||||
|
||||
line := [4]string{
|
||||
spec.Method,
|
||||
spec.URL,
|
||||
strings.Join(langext.ArrMap(spec.Middlewares, w.cleanMiddlewareName), " -> "),
|
||||
spec.Handler,
|
||||
}
|
||||
|
||||
lines = append(lines, line)
|
||||
|
||||
pad[0] = mathext.Max(pad[0], len(line[0]))
|
||||
pad[1] = mathext.Max(pad[1], len(line[1]))
|
||||
pad[2] = mathext.Max(pad[2], len(line[2]))
|
||||
pad[3] = mathext.Max(pad[3], len(line[3]))
|
||||
}
|
||||
|
||||
for _, line := range lines {
|
||||
|
||||
fmt.Printf("Gin-Route: %s %s --> %s --> %s\n",
|
||||
langext.StrPadRight("["+line[0]+"]", " ", pad[0]+2),
|
||||
langext.StrPadRight(line[1], " ", pad[1]),
|
||||
langext.StrPadRight(line[2], " ", pad[2]),
|
||||
langext.StrPadRight(line[3], " ", pad[3]))
|
||||
}
|
||||
}
|
||||
|
||||
func (w *GinWrapper) cleanMiddlewareName(fname string) string {
|
||||
|
||||
funcSuffix := rext.W(regexp.MustCompile(`\.func[0-9]+(?:\.[0-9]+)*$`))
|
||||
if match, ok := funcSuffix.MatchFirst(fname); ok {
|
||||
fname = fname[:len(fname)-match.FullMatch().Length()]
|
||||
}
|
||||
|
||||
if strings.HasSuffix(fname, ".(*GinRoutesWrapper).WithJSONFilter") {
|
||||
fname = "[JSONFilter]"
|
||||
}
|
||||
|
||||
if fname == "ginext.BodyBuffer" {
|
||||
fname = "[BodyBuffer]"
|
||||
}
|
||||
|
||||
skipPrefixes := []string{"api.(*Handler).", "api."}
|
||||
for _, pfx := range skipPrefixes {
|
||||
if strings.HasPrefix(fname, pfx) {
|
||||
fname = fname[len(pfx):]
|
||||
}
|
||||
}
|
||||
|
||||
return fname
|
||||
}
|
@@ -1,52 +0,0 @@
|
||||
package ginext
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/gin-gonic/gin"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
|
||||
)
|
||||
|
||||
type WHandlerFunc func(PreContext) HTTPResponse
|
||||
|
||||
func Wrap(w *GinWrapper, fn WHandlerFunc) gin.HandlerFunc {
|
||||
|
||||
return func(g *gin.Context) {
|
||||
|
||||
reqctx := g.Request.Context()
|
||||
|
||||
pctx := PreContext{
|
||||
wrapper: w,
|
||||
ginCtx: g,
|
||||
persistantData: &preContextData{},
|
||||
}
|
||||
|
||||
wrap, stackTrace, panicObj := callPanicSafe(fn, pctx)
|
||||
if panicObj != nil {
|
||||
|
||||
fmt.Printf("\n======== ======== STACKTRACE ======== ========\n%s\n======== ======== ======== ========\n\n", stackTrace)
|
||||
|
||||
err := exerr.
|
||||
New(exerr.TypePanic, "Panic occurred (in gin handler)").
|
||||
Any("panicObj", panicObj).
|
||||
Str("trace", stackTrace).
|
||||
Build()
|
||||
|
||||
wrap = Error(err)
|
||||
}
|
||||
|
||||
if g.Writer.Written() {
|
||||
panic("Writing in WrapperFunc is not supported")
|
||||
}
|
||||
|
||||
if pctx.persistantData.sessionObj != nil {
|
||||
err := pctx.persistantData.sessionObj.Finish(reqctx, wrap)
|
||||
if err != nil {
|
||||
wrap = Error(exerr.Wrap(err, "Failed to finish session").Any("originalResponse", wrap).Build())
|
||||
}
|
||||
}
|
||||
|
||||
if reqctx.Err() == nil {
|
||||
wrap.Write(g)
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,190 +0,0 @@
|
||||
package ginext
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/gin-gonic/gin/binding"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"io"
|
||||
"runtime/debug"
|
||||
"time"
|
||||
)
|
||||
|
||||
type PreContext struct {
|
||||
ginCtx *gin.Context
|
||||
wrapper *GinWrapper
|
||||
uri any
|
||||
query any
|
||||
body any
|
||||
rawbody *[]byte
|
||||
form any
|
||||
header any
|
||||
timeout *time.Duration
|
||||
persistantData *preContextData // must be a ptr, so that we can get the values back in our Wrap func
|
||||
}
|
||||
|
||||
type preContextData struct {
|
||||
sessionObj SessionObject
|
||||
}
|
||||
|
||||
func (pctx *PreContext) URI(uri any) *PreContext {
|
||||
pctx.uri = uri
|
||||
return pctx
|
||||
}
|
||||
|
||||
func (pctx *PreContext) Query(query any) *PreContext {
|
||||
pctx.query = query
|
||||
return pctx
|
||||
}
|
||||
|
||||
func (pctx *PreContext) Body(body any) *PreContext {
|
||||
pctx.body = body
|
||||
return pctx
|
||||
}
|
||||
|
||||
func (pctx *PreContext) RawBody(rawbody *[]byte) *PreContext {
|
||||
pctx.rawbody = rawbody
|
||||
return pctx
|
||||
}
|
||||
|
||||
func (pctx *PreContext) Form(form any) *PreContext {
|
||||
pctx.form = form
|
||||
return pctx
|
||||
}
|
||||
|
||||
func (pctx *PreContext) Header(header any) *PreContext {
|
||||
pctx.header = header
|
||||
return pctx
|
||||
}
|
||||
|
||||
func (pctx *PreContext) WithTimeout(to time.Duration) *PreContext {
|
||||
pctx.timeout = &to
|
||||
return pctx
|
||||
}
|
||||
|
||||
func (pctx *PreContext) WithSession(sessionObj SessionObject) *PreContext {
|
||||
pctx.persistantData.sessionObj = sessionObj
|
||||
return pctx
|
||||
}
|
||||
|
||||
func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
|
||||
if pctx.uri != nil {
|
||||
if err := pctx.ginCtx.ShouldBindUri(pctx.uri); err != nil {
|
||||
err = exerr.Wrap(err, "Failed to read uri").
|
||||
WithType(exerr.TypeBindFailURI).
|
||||
Str("struct_type", fmt.Sprintf("%T", pctx.uri)).
|
||||
Build()
|
||||
return nil, nil, langext.Ptr(Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
if pctx.query != nil {
|
||||
if err := pctx.ginCtx.ShouldBindQuery(pctx.query); err != nil {
|
||||
err = exerr.Wrap(err, "Failed to read query").
|
||||
WithType(exerr.TypeBindFailQuery).
|
||||
Str("struct_type", fmt.Sprintf("%T", pctx.query)).
|
||||
Build()
|
||||
return nil, nil, langext.Ptr(Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
if pctx.body != nil {
|
||||
if pctx.ginCtx.ContentType() == "application/json" {
|
||||
if err := pctx.ginCtx.ShouldBindJSON(pctx.body); err != nil {
|
||||
err = exerr.Wrap(err, "Failed to read json-body").
|
||||
WithType(exerr.TypeBindFailJSON).
|
||||
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
|
||||
Build()
|
||||
return nil, nil, langext.Ptr(Error(err))
|
||||
}
|
||||
} else {
|
||||
err := exerr.New(exerr.TypeBindFailJSON, "missing JSON body").
|
||||
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
|
||||
Build()
|
||||
return nil, nil, langext.Ptr(Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
if pctx.rawbody != nil {
|
||||
if brc, ok := pctx.ginCtx.Request.Body.(dataext.BufferedReadCloser); ok {
|
||||
v, err := brc.BufferedAll()
|
||||
if err != nil {
|
||||
return nil, nil, langext.Ptr(Error(err))
|
||||
}
|
||||
*pctx.rawbody = v
|
||||
} else {
|
||||
buf := &bytes.Buffer{}
|
||||
_, err := io.Copy(buf, pctx.ginCtx.Request.Body)
|
||||
if err != nil {
|
||||
return nil, nil, langext.Ptr(Error(err))
|
||||
}
|
||||
*pctx.rawbody = buf.Bytes()
|
||||
}
|
||||
}
|
||||
|
||||
if pctx.form != nil {
|
||||
if pctx.ginCtx.ContentType() == "multipart/form-data" {
|
||||
if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil {
|
||||
err = exerr.Wrap(err, "Failed to read multipart-form").
|
||||
WithType(exerr.TypeBindFailFormData).
|
||||
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
|
||||
Build()
|
||||
return nil, nil, langext.Ptr(Error(err))
|
||||
}
|
||||
} else if pctx.ginCtx.ContentType() == "application/x-www-form-urlencoded" {
|
||||
if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil {
|
||||
err = exerr.Wrap(err, "Failed to read urlencoded-form").
|
||||
WithType(exerr.TypeBindFailFormData).
|
||||
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
|
||||
Build()
|
||||
return nil, nil, langext.Ptr(Error(err))
|
||||
}
|
||||
} else {
|
||||
err := exerr.New(exerr.TypeBindFailFormData, "missing form body").
|
||||
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
|
||||
Build()
|
||||
return nil, nil, langext.Ptr(Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
if pctx.header != nil {
|
||||
if err := pctx.ginCtx.ShouldBindHeader(pctx.header); err != nil {
|
||||
err = exerr.Wrap(err, "Failed to read header").
|
||||
WithType(exerr.TypeBindFailHeader).
|
||||
Str("struct_type", fmt.Sprintf("%T", pctx.query)).
|
||||
Build()
|
||||
return nil, nil, langext.Ptr(Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
ictx, cancel := context.WithTimeout(context.Background(), langext.Coalesce(pctx.timeout, pctx.wrapper.requestTimeout))
|
||||
|
||||
if pctx.persistantData.sessionObj != nil {
|
||||
err := pctx.persistantData.sessionObj.Init(pctx.ginCtx, ictx)
|
||||
if err != nil {
|
||||
cancel()
|
||||
return nil, nil, langext.Ptr(Error(exerr.Wrap(err, "Failed to init session").Build()))
|
||||
}
|
||||
}
|
||||
|
||||
actx := CreateAppContext(pctx.ginCtx, ictx, cancel)
|
||||
|
||||
return actx, pctx.ginCtx, nil
|
||||
}
|
||||
|
||||
func callPanicSafe(fn WHandlerFunc, pctx PreContext) (res HTTPResponse, stackTrace string, panicObj any) {
|
||||
defer func() {
|
||||
if rec := recover(); rec != nil {
|
||||
res = nil
|
||||
stackTrace = string(debug.Stack())
|
||||
panicObj = rec
|
||||
}
|
||||
}()
|
||||
|
||||
res = fn(pctx)
|
||||
return res, "", nil
|
||||
}
|
@@ -1,253 +0,0 @@
package ginext

import (
	"fmt"
	"github.com/gin-gonic/gin"
	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
)

type headerval struct {
	Key string
	Val string
}

type HTTPResponse interface {
	Write(g *gin.Context)
	WithHeader(k string, v string) HTTPResponse
	IsSuccess() bool
}

type jsonHTTPResponse struct {
	statusCode int
	data       any
	headers    []headerval
}

func (j jsonHTTPResponse) Write(g *gin.Context) {
	for _, v := range j.headers {
		g.Header(v.Key, v.Val)
	}
	var f *string
	if jsonfilter := g.GetString("goext.jsonfilter"); jsonfilter != "" {
		f = &jsonfilter
	}
	g.Render(j.statusCode, json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true, Filter: f})
}

func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse {
	j.headers = append(j.headers, headerval{k, v})
	return j
}

func (j jsonHTTPResponse) IsSuccess() bool {
	return j.statusCode >= 200 && j.statusCode <= 399
}

type emptyHTTPResponse struct {
	statusCode int
	headers    []headerval
}

func (j emptyHTTPResponse) Write(g *gin.Context) {
	for _, v := range j.headers {
		g.Header(v.Key, v.Val)
	}
	g.Status(j.statusCode)
}

func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse {
	j.headers = append(j.headers, headerval{k, v})
	return j
}

func (j emptyHTTPResponse) IsSuccess() bool {
	return j.statusCode >= 200 && j.statusCode <= 399
}

type textHTTPResponse struct {
	statusCode int
	data       string
	headers    []headerval
}

func (j textHTTPResponse) Write(g *gin.Context) {
	for _, v := range j.headers {
		g.Header(v.Key, v.Val)
	}
	g.String(j.statusCode, "%s", j.data)
}

func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse {
	j.headers = append(j.headers, headerval{k, v})
	return j
}

func (j textHTTPResponse) IsSuccess() bool {
	return j.statusCode >= 200 && j.statusCode <= 399
}

type dataHTTPResponse struct {
	statusCode  int
	data        []byte
	contentType string
	headers     []headerval
}

func (j dataHTTPResponse) Write(g *gin.Context) {
	for _, v := range j.headers {
		g.Header(v.Key, v.Val)
	}
	g.Data(j.statusCode, j.contentType, j.data)
}

func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
	j.headers = append(j.headers, headerval{k, v})
	return j
}

func (j dataHTTPResponse) IsSuccess() bool {
	return j.statusCode >= 200 && j.statusCode <= 399
}

type fileHTTPResponse struct {
	mimetype string
	filepath string
	filename *string
	headers  []headerval
}

func (j fileHTTPResponse) Write(g *gin.Context) {
	g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
	if j.filename != nil {
		g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
	}
	for _, v := range j.headers {
		g.Header(v.Key, v.Val)
	}
	g.File(j.filepath)
}

func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse {
	j.headers = append(j.headers, headerval{k, v})
	return j
}

func (j fileHTTPResponse) IsSuccess() bool {
	return true
}

type downloadDataHTTPResponse struct {
	statusCode int
	mimetype   string
	data       []byte
	filename   *string
	headers    []headerval
}

func (j downloadDataHTTPResponse) Write(g *gin.Context) {
	g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
	if j.filename != nil {
		g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
	}
	for _, v := range j.headers {
		g.Header(v.Key, v.Val)
	}
	g.Data(j.statusCode, j.mimetype, j.data)
}

func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
	j.headers = append(j.headers, headerval{k, v})
	return j
}

func (j downloadDataHTTPResponse) IsSuccess() bool {
	return j.statusCode >= 200 && j.statusCode <= 399
}

type redirectHTTPResponse struct {
	statusCode int
	url        string
	headers    []headerval
}

func (j redirectHTTPResponse) Write(g *gin.Context) {
	g.Redirect(j.statusCode, j.url)
}

func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse {
	j.headers = append(j.headers, headerval{k, v})
	return j
}

func (j redirectHTTPResponse) IsSuccess() bool {
	return j.statusCode >= 200 && j.statusCode <= 399
}

type jsonAPIErrResponse struct {
	err     *exerr.ExErr
	headers []headerval
}

func (j jsonAPIErrResponse) Write(g *gin.Context) {
	j.err.Output(g)
}

func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse {
	j.headers = append(j.headers, headerval{k, v})
	return j
}

func (j jsonAPIErrResponse) IsSuccess() bool {
	return false
}

func Status(sc int) HTTPResponse {
	return &emptyHTTPResponse{statusCode: sc}
}

func JSON(sc int, data any) HTTPResponse {
	return &jsonHTTPResponse{statusCode: sc, data: data}
}

func Data(sc int, contentType string, data []byte) HTTPResponse {
	return &dataHTTPResponse{statusCode: sc, contentType: contentType, data: data}
}

func Text(sc int, data string) HTTPResponse {
	return &textHTTPResponse{statusCode: sc, data: data}
}

func File(mimetype string, filepath string) HTTPResponse {
	return &fileHTTPResponse{mimetype: mimetype, filepath: filepath}
}

func Download(mimetype string, filepath string, filename string) HTTPResponse {
	return &fileHTTPResponse{mimetype: mimetype, filepath: filepath, filename: &filename}
}

func DownloadData(status int, mimetype string, filename string, data []byte) HTTPResponse {
	return &downloadDataHTTPResponse{statusCode: status, mimetype: mimetype, data: data, filename: &filename}
}

func Redirect(sc int, newURL string) HTTPResponse {
	return &redirectHTTPResponse{statusCode: sc, url: newURL}
}

func Error(e error) HTTPResponse {
	return &jsonAPIErrResponse{
		err: exerr.FromError(e),
	}
}

func ErrWrap(e error, errorType exerr.ErrorType, msg string) HTTPResponse {
	return &jsonAPIErrResponse{
		err: exerr.FromError(exerr.Wrap(e, msg).WithType(errorType).Build()),
	}
}

func NotImplemented() HTTPResponse {
	return Error(exerr.New(exerr.TypeNotImplemented, "").Build())
}
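Handlers return one of the HTTPResponse implementations above via the exported constructors (Status, JSON, Text, Error, ...). Below is a minimal, hedged sketch of a handler built on this API; the `func(PreContext) HTTPResponse` shape matches the WHandlerFunc used elsewhere in this diff, while the uri struct and the returned payload are made up for illustration.

```go
package main

import (
	"net/http"

	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

// getUser is a hypothetical handler; the uri struct and the returned payload
// are illustrative only.
func getUser(pctx ginext.PreContext) ginext.HTTPResponse {
	type uriParams struct {
		UserID string `uri:"uid"`
	}

	var u uriParams
	actx, _, errResp := pctx.URI(&u).Start()
	if errResp != nil {
		return *errResp // bind errors already come back as a ready-made error response
	}
	_ = actx // the AppContext would normally be handed to the business logic; its exact lifecycle API is not part of this hunk

	return ginext.JSON(http.StatusOK, map[string]any{"id": u.UserID})
}
```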
218	ginext/routes.go
@@ -1,218 +0,0 @@
package ginext

import (
	"github.com/gin-gonic/gin"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"net/http"
	"path"
	"reflect"
	"runtime"
	"strings"
)

var anyMethods = []string{
	http.MethodGet, http.MethodPost, http.MethodPut, http.MethodPatch,
	http.MethodHead, http.MethodOptions, http.MethodDelete, http.MethodConnect,
	http.MethodTrace,
}

type GinRoutesWrapper struct {
	wrapper        *GinWrapper
	routes         gin.IRouter
	absPath        string
	defaultHandler []gin.HandlerFunc
}

type GinRouteBuilder struct {
	routes *GinRoutesWrapper

	method   string
	relPath  string
	absPath  string
	handlers []gin.HandlerFunc
}

func (w *GinWrapper) Routes() *GinRoutesWrapper {
	return &GinRoutesWrapper{
		wrapper:        w,
		routes:         w.engine,
		absPath:        "",
		defaultHandler: make([]gin.HandlerFunc, 0),
	}
}

func (w *GinRoutesWrapper) Group(relativePath string) *GinRoutesWrapper {
	return &GinRoutesWrapper{
		wrapper:        w.wrapper,
		routes:         w.routes.Group(relativePath),
		defaultHandler: langext.ArrCopy(w.defaultHandler),
		absPath:        joinPaths(w.absPath, relativePath),
	}
}

func (w *GinRoutesWrapper) Use(middleware ...gin.HandlerFunc) *GinRoutesWrapper {
	defHandler := langext.ArrCopy(w.defaultHandler)
	defHandler = append(defHandler, middleware...)
	return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler}
}

func (w *GinRoutesWrapper) WithJSONFilter(filter string) *GinRoutesWrapper {
	defHandler := langext.ArrCopy(w.defaultHandler)
	defHandler = append(defHandler, func(g *gin.Context) {
		g.Set("goext.jsonfilter", filter)
	})
	return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler}
}

func (w *GinRoutesWrapper) GET(relativePath string) *GinRouteBuilder {
	return w._route(http.MethodGet, relativePath)
}

func (w *GinRoutesWrapper) POST(relativePath string) *GinRouteBuilder {
	return w._route(http.MethodPost, relativePath)
}

func (w *GinRoutesWrapper) DELETE(relativePath string) *GinRouteBuilder {
	return w._route(http.MethodDelete, relativePath)
}

func (w *GinRoutesWrapper) PATCH(relativePath string) *GinRouteBuilder {
	return w._route(http.MethodPatch, relativePath)
}

func (w *GinRoutesWrapper) PUT(relativePath string) *GinRouteBuilder {
	return w._route(http.MethodPut, relativePath)
}

func (w *GinRoutesWrapper) OPTIONS(relativePath string) *GinRouteBuilder {
	return w._route(http.MethodOptions, relativePath)
}

func (w *GinRoutesWrapper) HEAD(relativePath string) *GinRouteBuilder {
	return w._route(http.MethodHead, relativePath)
}

func (w *GinRoutesWrapper) COUNT(relativePath string) *GinRouteBuilder {
	return w._route("COUNT", relativePath)
}

func (w *GinRoutesWrapper) Any(relativePath string) *GinRouteBuilder {
	return w._route("*", relativePath)
}

func (w *GinRoutesWrapper) _route(method string, relativePath string) *GinRouteBuilder {
	return &GinRouteBuilder{
		routes:   w,
		method:   method,
		relPath:  relativePath,
		absPath:  joinPaths(w.absPath, relativePath),
		handlers: langext.ArrCopy(w.defaultHandler),
	}
}

func (w *GinRouteBuilder) Use(middleware ...gin.HandlerFunc) *GinRouteBuilder {
	w.handlers = append(w.handlers, middleware...)
	return w
}

func (w *GinRouteBuilder) WithJSONFilter(filter string) *GinRouteBuilder {
	w.handlers = append(w.handlers, func(g *gin.Context) {
		g.Set("goext.jsonfilter", filter)
	})
	return w
}

func (w *GinRouteBuilder) Handle(handler WHandlerFunc) {
	if w.routes.wrapper.bufferBody {
		arr := make([]gin.HandlerFunc, 0, len(w.handlers)+1)
		arr = append(arr, BodyBuffer)
		arr = append(arr, w.handlers...)
		w.handlers = arr
	}

	middlewareNames := langext.ArrMap(w.handlers, func(v gin.HandlerFunc) string { return nameOfFunction(v) })
	handlerName := nameOfFunction(handler)

	w.handlers = append(w.handlers, Wrap(w.routes.wrapper, handler))

	methodName := w.method

	if w.method == "*" {
		methodName = "ANY"
		for _, method := range anyMethods {
			w.routes.routes.Handle(method, w.relPath, w.handlers...)
		}
	} else {
		w.routes.routes.Handle(w.method, w.relPath, w.handlers...)
	}

	w.routes.wrapper.routeSpecs = append(w.routes.wrapper.routeSpecs, ginRouteSpec{
		Method:      methodName,
		URL:         w.absPath,
		Middlewares: middlewareNames,
		Handler:     handlerName,
	})
}

func (w *GinWrapper) NoRoute(handler WHandlerFunc) {
	handlers := make([]gin.HandlerFunc, 0)

	if w.bufferBody {
		handlers = append(handlers, BodyBuffer)
	}

	middlewareNames := langext.ArrMap(handlers, func(v gin.HandlerFunc) string { return nameOfFunction(v) })
	handlerName := nameOfFunction(handler)

	handlers = append(handlers, Wrap(w, handler))

	w.engine.NoRoute(handlers...)

	w.routeSpecs = append(w.routeSpecs, ginRouteSpec{
		Method:      "ANY",
		URL:         "[NO_ROUTE]",
		Middlewares: middlewareNames,
		Handler:     handlerName,
	})
}

func nameOfFunction(f any) string {
	fname := runtime.FuncForPC(reflect.ValueOf(f).Pointer()).Name()

	split := strings.Split(fname, "/")
	if len(split) == 0 {
		return ""
	}

	fname = split[len(split)-1]

	// https://stackoverflow.com/a/32925345/1761622
	if strings.HasSuffix(fname, "-fm") {
		fname = fname[:len(fname)-len("-fm")]
	}

	return fname
}

// joinPaths is copied verbatim from gin@v1.9.1/gin.go
func joinPaths(absolutePath, relativePath string) string {
	if relativePath == "" {
		return absolutePath
	}

	finalPath := path.Join(absolutePath, relativePath)
	if lastChar(relativePath) == '/' && lastChar(finalPath) != '/' {
		return finalPath + "/"
	}
	return finalPath
}

func lastChar(str string) uint8 {
	if str == "" {
		panic("The length of the string can't be 0")
	}
	return str[len(str)-1]
}
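The builder above wraps each WHandlerFunc and registers it on the underlying gin engine. A short sketch of how the fluent routing API is meant to be used; the *GinWrapper constructor is not part of this diff, and the middleware and handler arguments are hypothetical:

```go
package main

import (
	"github.com/gin-gonic/gin"

	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

// registerRoutes wires up a few illustrative endpoints; wrapper is assumed to be
// created elsewhere and the handlers are hypothetical WHandlerFuncs.
func registerRoutes(wrapper *ginext.GinWrapper, listUsers, createUser, notFound ginext.WHandlerFunc, auth gin.HandlerFunc) {
	api := wrapper.Routes().Group("/api").Group("/v1").Use(auth).WithJSONFilter("PUBLIC")

	api.GET("/users").Handle(listUsers)   // registered as GET  /api/v1/users
	api.POST("/users").Handle(createUser) // registered as POST /api/v1/users

	// Fallback for requests that match no registered route.
	wrapper.NoRoute(notFound)
}
```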
@@ -1,11 +0,0 @@
package ginext

import (
	"context"
	"github.com/gin-gonic/gin"
)

type SessionObject interface {
	Init(g *gin.Context, ctx context.Context) error
	Finish(ctx context.Context, resp HTTPResponse) error
}
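SessionObject is the hook that PreContext.Start initialises and that the wrapper finishes with the final HTTPResponse (both call sites appear in the hunks above). A minimal sketch of an implementation, with the resource handling left as placeholders since the real session types are not part of this diff:

```go
package main

import (
	"context"

	"github.com/gin-gonic/gin"

	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

// txSession is an illustrative SessionObject: it would acquire some resource
// (e.g. a database transaction) before the handler runs and release it afterwards.
type txSession struct {
	started bool
}

func (s *txSession) Init(g *gin.Context, ctx context.Context) error {
	s.started = true // placeholder for "begin transaction"
	return nil
}

func (s *txSession) Finish(ctx context.Context, resp ginext.HTTPResponse) error {
	if resp.IsSuccess() {
		return nil // placeholder for "commit"
	}
	return nil // placeholder for "rollback"
}

// compile-time check that txSession satisfies the interface
var _ ginext.SessionObject = (*txSession)(nil)
```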
59	go.mod
@@ -1,49 +1,32 @@
module gogs.mikescher.com/BlackForestBytes/goext

go 1.21
go 1.19

require (
	github.com/gin-gonic/gin v1.9.1
	github.com/golang/snappy v0.0.4
	github.com/google/go-cmp v0.5.9
	github.com/jmoiron/sqlx v1.3.5
	github.com/rs/xid v1.5.0
	github.com/rs/zerolog v1.31.0
	go.mongodb.org/mongo-driver v1.13.1
	golang.org/x/crypto v0.16.0
	golang.org/x/sys v0.15.0
	golang.org/x/term v0.15.0
	github.com/klauspost/compress v1.16.6
	github.com/kr/pretty v0.1.0
	github.com/montanaflynn/stats v0.7.1
	github.com/pkg/errors v0.9.1
	github.com/stretchr/testify v1.8.4
	github.com/tidwall/pretty v1.0.0
	github.com/xdg-go/scram v1.1.2
	github.com/xdg-go/stringprep v1.0.4
	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a
	go.mongodb.org/mongo-driver v1.11.7
	golang.org/x/crypto v0.10.0
	golang.org/x/sync v0.3.0
	golang.org/x/sys v0.9.0
	golang.org/x/term v0.9.0
)

require (
	github.com/bytedance/sonic v1.10.2 // indirect
	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
	github.com/chenzhuoyu/iasm v0.9.1 // indirect
	github.com/gabriel-vasile/mimetype v1.4.3 // indirect
	github.com/gin-contrib/sse v0.1.0 // indirect
	github.com/go-playground/locales v0.14.1 // indirect
	github.com/go-playground/universal-translator v0.18.1 // indirect
	github.com/go-playground/validator/v10 v10.16.0 // indirect
	github.com/goccy/go-json v0.10.2 // indirect
	github.com/golang/snappy v0.0.4 // indirect
	github.com/json-iterator/go v1.1.12 // indirect
	github.com/klauspost/compress v1.17.4 // indirect
	github.com/klauspost/cpuid/v2 v2.2.6 // indirect
	github.com/leodido/go-urn v1.2.4 // indirect
	github.com/mattn/go-colorable v0.1.13 // indirect
	github.com/mattn/go-isatty v0.0.20 // indirect
	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
	github.com/modern-go/reflect2 v1.0.2 // indirect
	github.com/montanaflynn/stats v0.7.1 // indirect
	github.com/pelletier/go-toml/v2 v2.1.0 // indirect
	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
	github.com/ugorji/go/codec v1.2.12 // indirect
	github.com/davecgh/go-spew v1.1.1 // indirect
	github.com/kr/text v0.1.0 // indirect
	github.com/pmezard/go-difflib v1.0.0 // indirect
	github.com/xdg-go/pbkdf2 v1.0.0 // indirect
	github.com/xdg-go/scram v1.1.2 // indirect
	github.com/xdg-go/stringprep v1.0.4 // indirect
	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect
	golang.org/x/arch v0.6.0 // indirect
	golang.org/x/net v0.19.0 // indirect
	golang.org/x/sync v0.5.0 // indirect
	golang.org/x/text v0.14.0 // indirect
	google.golang.org/protobuf v1.31.0 // indirect
	golang.org/x/text v0.10.0 // indirect
	gopkg.in/yaml.v3 v3.0.1 // indirect
)
176	go.sum
@@ -1,168 +1,72 @@
|
||||
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
|
||||
github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
|
||||
github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE=
|
||||
github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
|
||||
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
|
||||
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
|
||||
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0=
|
||||
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA=
|
||||
github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo=
|
||||
github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
|
||||
github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0=
|
||||
github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
|
||||
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU=
|
||||
github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA=
|
||||
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
|
||||
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
|
||||
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
|
||||
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
|
||||
github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
|
||||
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
|
||||
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
|
||||
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
|
||||
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
|
||||
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||
github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24=
|
||||
github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
|
||||
github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE=
|
||||
github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
|
||||
github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
|
||||
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
||||
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
|
||||
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
|
||||
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
|
||||
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
|
||||
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
|
||||
github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM=
|
||||
github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
|
||||
github.com/klauspost/compress v1.17.1 h1:NE3C767s2ak2bweCZo3+rdP4U/HoyVXLv/X9f2gPS5g=
|
||||
github.com/klauspost/compress v1.17.1/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
|
||||
github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4=
|
||||
github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
|
||||
github.com/klauspost/compress v1.17.3 h1:qkRjuerhUU1EmXLYGkSH6EZL+vPSxIrYjLNAK4slzwA=
|
||||
github.com/klauspost/compress v1.17.3/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
|
||||
github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4=
|
||||
github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
|
||||
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||
github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg=
|
||||
github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
|
||||
github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc=
|
||||
github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
|
||||
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
|
||||
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
|
||||
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
|
||||
github.com/klauspost/compress v1.16.6 h1:91SKEy4K37vkp255cJ8QesJhjyRO0hn9i9G0GoUwLsk=
|
||||
github.com/klauspost/compress v1.16.6/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
|
||||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
|
||||
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
|
||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
|
||||
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg=
|
||||
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
||||
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
|
||||
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
|
||||
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
|
||||
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc=
|
||||
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
|
||||
github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A=
|
||||
github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
||||
github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU=
|
||||
github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
||||
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
|
||||
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
||||
github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
|
||||
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
|
||||
github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
|
||||
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
|
||||
github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
|
||||
github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY=
|
||||
github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4=
|
||||
github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=
|
||||
github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
|
||||
github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
|
||||
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
|
||||
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk=
|
||||
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
go.mongodb.org/mongo-driver v1.12.1 h1:nLkghSU8fQNaK7oUmDhQFsnrtcoNy7Z6LVFKsEecqgE=
|
||||
go.mongodb.org/mongo-driver v1.12.1/go.mod h1:/rGBTebI3XYboVmgz+Wv3Bcbl3aD0QF9zl6kDDw18rQ=
|
||||
go.mongodb.org/mongo-driver v1.13.0 h1:67DgFFjYOCMWdtTEmKFpV3ffWlFnh+CYZ8ZS/tXWUfY=
|
||||
go.mongodb.org/mongo-driver v1.13.0/go.mod h1:/rGBTebI3XYboVmgz+Wv3Bcbl3aD0QF9zl6kDDw18rQ=
|
||||
go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk=
|
||||
go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo=
|
||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||
golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y=
|
||||
golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||
golang.org/x/arch v0.6.0 h1:S0JTfE48HbRj80+4tbvZDYsJ3tGv6BUU3XxyZ7CirAc=
|
||||
golang.org/x/arch v0.6.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
|
||||
go.mongodb.org/mongo-driver v1.11.7 h1:LIwYxASDLGUg/8wOhgOOZhX8tQa/9tgZPgzZoVqJvcs=
|
||||
go.mongodb.org/mongo-driver v1.11.7/go.mod h1:G9TgswdsWjX4tmDA5zfs2+6AEPpYJwqblyjsfuh8oXY=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
|
||||
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
|
||||
golang.org/x/crypto v0.15.0 h1:frVn1TEaCEaZcn3Tmd7Y2b5KKPaZ+I32Q2OA3kYp5TA=
|
||||
golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g=
|
||||
golang.org/x/crypto v0.16.0 h1:mMMrFzRSCF0GvB7Ne27XVtVAaXLrPmgPC7/v0tkwHaY=
|
||||
golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
|
||||
golang.org/x/crypto v0.10.0 h1:LKqV2xt9+kDzSTfOhx4FrkEBcMrAgHSYgzywV9zcGmM=
|
||||
golang.org/x/crypto v0.10.0/go.mod h1:o4eNf7Ede1fv+hwOwZsTHl9EsPFO6q6ZvYR8vYfY45I=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.16.0 h1:7eBu7KsSvFDtSXUIDbh3aqlK4DPsZ1rByC8PFfBThos=
|
||||
golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
|
||||
golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM=
|
||||
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
|
||||
golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg=
|
||||
golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ=
|
||||
golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c=
|
||||
golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ=
|
||||
golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||
golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE=
|
||||
golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
|
||||
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@@ -170,47 +74,27 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
|
||||
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q=
|
||||
golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc=
|
||||
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s=
|
||||
golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek=
|
||||
golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
|
||||
golang.org/x/term v0.14.0 h1:LGK9IlZ8T9jvdy6cTdfKUCltatMFOehAQo9SRC46UQ8=
|
||||
golang.org/x/term v0.14.0/go.mod h1:TySc+nGkYR6qt8km8wUhuFRTVSMIX3XPR58y2lC8vww=
|
||||
golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4=
|
||||
golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
|
||||
golang.org/x/term v0.9.0 h1:GRRCnKYhdQrD8kfRAdQ6Zcw1P0OcELxGLKJvtjVMZ28=
|
||||
golang.org/x/term v0.9.0/go.mod h1:M6DEAAIenWoTxdKrOltXcmDY3rSplQUkrvaDU5FcQyo=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
|
||||
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.10.0 h1:UpjohKhiEgNc0CSauXmwYftY1+LlaC75SJwh0SgCX58=
|
||||
golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
|
||||
google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
|
||||
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
|
||||
|
@@ -1,5 +1,5 @@
package goext

const GoextVersion = "0.0.339"
const GoextVersion = "0.0.166"

const GoextVersionTimestamp = "2023-12-07T10:54:36+0100"
const GoextVersionTimestamp = "2023-06-19T10:25:41+0200"
@@ -156,6 +156,7 @@ import (
|
||||
// an error.
|
||||
func Marshal(v any) ([]byte, error) {
|
||||
e := newEncodeState()
|
||||
defer encodeStatePool.Put(e)
|
||||
|
||||
err := e.marshal(v, encOpts{escapeHTML: true})
|
||||
if err != nil {
|
||||
@@ -163,8 +164,6 @@ func Marshal(v any) ([]byte, error) {
|
||||
}
|
||||
buf := append([]byte(nil), e.Bytes()...)
|
||||
|
||||
encodeStatePool.Put(e)
|
||||
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
@@ -175,9 +174,9 @@ type IndentOpt struct {
|
||||
|
||||
// MarshalSafeCollections is like Marshal except it will marshal nil maps and
|
||||
// slices as '{}' and '[]' respectfully instead of 'null'
|
||||
func MarshalSafeCollections(v interface{}, nilSafeSlices bool, nilSafeMaps bool, indent *IndentOpt, filter *string) ([]byte, error) {
|
||||
func MarshalSafeCollections(v interface{}, nilSafeSlices bool, nilSafeMaps bool, indent *IndentOpt) ([]byte, error) {
|
||||
e := &encodeState{}
|
||||
err := e.marshal(v, encOpts{escapeHTML: true, nilSafeSlices: nilSafeSlices, nilSafeMaps: nilSafeMaps, filter: filter})
|
||||
err := e.marshal(v, encOpts{escapeHTML: true, nilSafeSlices: nilSafeSlices, nilSafeMaps: nilSafeMaps})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -394,9 +393,6 @@ type encOpts struct {
|
||||
nilSafeSlices bool
|
||||
// nilSafeMaps marshals a nil maps '{}' instead of 'null'
|
||||
nilSafeMaps bool
|
||||
// filter matches jsonfilter tag of struct
|
||||
// marshals if no jsonfilter is set or otherwise if jsonfilter has the filter value
|
||||
filter *string
|
||||
}
|
||||
|
||||
type encoderFunc func(e *encodeState, v reflect.Value, opts encOpts)
|
||||
@@ -781,8 +777,6 @@ FieldLoop:
|
||||
|
||||
if f.omitEmpty && isEmptyValue(fv) {
|
||||
continue
|
||||
} else if opts.filter != nil && len(f.jsonfilter) > 0 && !f.jsonfilter.Contains(*opts.filter) {
|
||||
continue
|
||||
}
|
||||
e.WriteByte(next)
|
||||
next = ','
|
||||
@@ -1226,28 +1220,15 @@ type field struct {
|
||||
nameNonEsc string // `"` + name + `":`
|
||||
nameEscHTML string // `"` + HTMLEscape(name) + `":`
|
||||
|
||||
tag bool
|
||||
index []int
|
||||
typ reflect.Type
|
||||
omitEmpty bool
|
||||
jsonfilter jsonfilter
|
||||
quoted bool
|
||||
tag bool
|
||||
index []int
|
||||
typ reflect.Type
|
||||
omitEmpty bool
|
||||
quoted bool
|
||||
|
||||
encoder encoderFunc
|
||||
}
|
||||
|
||||
// jsonfilter stores the value of the jsonfilter struct tag
|
||||
type jsonfilter []string
|
||||
|
||||
func (j jsonfilter) Contains(t string) bool {
|
||||
for _, tag := range j {
|
||||
if t == tag {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// byIndex sorts field by index sequence.
|
||||
type byIndex []field
|
||||
|
||||
@@ -1323,13 +1304,6 @@ func typeFields(t reflect.Type) structFields {
|
||||
if !isValidTag(name) {
|
||||
name = ""
|
||||
}
|
||||
|
||||
var jsonfilter []string
|
||||
jsonfilterTag := sf.Tag.Get("jsonfilter")
|
||||
if jsonfilterTag != "" && jsonfilterTag != "-" {
|
||||
jsonfilter = strings.Split(jsonfilterTag, ",")
|
||||
}
|
||||
|
||||
index := make([]int, len(f.index)+1)
|
||||
copy(index, f.index)
|
||||
index[len(f.index)] = i
|
||||
@@ -1360,13 +1334,12 @@ func typeFields(t reflect.Type) structFields {
|
||||
name = sf.Name
|
||||
}
|
||||
field := field{
|
||||
name: name,
|
||||
tag: tagged,
|
||||
index: index,
|
||||
typ: ft,
|
||||
omitEmpty: opts.Contains("omitempty"),
|
||||
jsonfilter: jsonfilter,
|
||||
quoted: quoted,
|
||||
name: name,
|
||||
tag: tagged,
|
||||
index: index,
|
||||
typ: ft,
|
||||
omitEmpty: opts.Contains("omitempty"),
|
||||
quoted: quoted,
|
||||
}
|
||||
field.nameBytes = []byte(field.name)
|
||||
field.equalFold = foldFunc(field.nameBytes)
|
||||
|
@@ -1253,10 +1253,6 @@ func TestMarshalSafeCollections(t *testing.T) {
|
||||
nilMapStruct struct {
|
||||
NilMap map[string]interface{} `json:"nil_map"`
|
||||
}
|
||||
testWithFilter struct {
|
||||
Test1 string `json:"test1" jsonfilter:"FILTERONE"`
|
||||
Test2 string `json:"test2" jsonfilter:"FILTERTWO"`
|
||||
}
|
||||
)
|
||||
|
||||
tests := []struct {
|
||||
@@ -1275,12 +1271,10 @@ func TestMarshalSafeCollections(t *testing.T) {
|
||||
{map[string]interface{}{"1": 1, "2": 2, "3": 3}, "{\"1\":1,\"2\":2,\"3\":3}"},
|
||||
{pNilMap, "null"},
|
||||
{nilMapStruct{}, "{\"nil_map\":{}}"},
|
||||
{testWithFilter{}, "{\"test1\":\"\"}"},
|
||||
}
|
||||
|
||||
filter := "FILTERONE"
|
||||
for i, tt := range tests {
|
||||
b, err := MarshalSafeCollections(tt.in, true, true, nil, &filter)
|
||||
b, err := MarshalSafeCollections(tt.in, true, true, nil)
|
||||
if err != nil {
|
||||
t.Errorf("test %d, unexpected failure: %v", i, err)
|
||||
}
|
||||
|
@@ -97,10 +97,7 @@ func equalFoldRight(s, t []byte) bool {
|
||||
t = t[size:]
|
||||
|
||||
}
|
||||
if len(t) > 0 {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
return len(t) == 0
|
||||
}
|
||||
|
||||
// asciiEqualFold is a specialization of bytes.EqualFold for use when
|
||||
|
@@ -52,9 +52,7 @@ func TestFold(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestFoldAgainstUnicode(t *testing.T) {
|
||||
const bufSize = 5
|
||||
buf1 := make([]byte, 0, bufSize)
|
||||
buf2 := make([]byte, 0, bufSize)
|
||||
var buf1, buf2 []byte
|
||||
var runes []rune
|
||||
for i := 0x20; i <= 0x7f; i++ {
|
||||
runes = append(runes, rune(i))
|
||||
@@ -96,12 +94,8 @@ func TestFoldAgainstUnicode(t *testing.T) {
|
||||
continue
|
||||
}
|
||||
for _, r2 := range runes {
|
||||
buf1 := append(buf1[:0], 'x')
|
||||
buf2 := append(buf2[:0], 'x')
|
||||
buf1 = buf1[:1+utf8.EncodeRune(buf1[1:bufSize], r)]
|
||||
buf2 = buf2[:1+utf8.EncodeRune(buf2[1:bufSize], r2)]
|
||||
buf1 = append(buf1, 'x')
|
||||
buf2 = append(buf2, 'x')
|
||||
buf1 = append(utf8.AppendRune(append(buf1[:0], 'x'), r), 'x')
|
||||
buf2 = append(utf8.AppendRune(append(buf2[:0], 'x'), r2), 'x')
|
||||
want := bytes.EqualFold(buf1, buf2)
|
||||
if got := ff.fold(buf1, buf2); got != want {
|
||||
t.Errorf("%s(%q, %q) = %v; want %v", ff.name, buf1, buf2, got, want)
|
||||
|
@@ -17,7 +17,6 @@ type GoJsonRender struct {
	NilSafeSlices bool
	NilSafeMaps   bool
	Indent        *IndentOpt
	Filter        *string
}

func (r GoJsonRender) Render(w http.ResponseWriter) error {
@@ -26,7 +25,7 @@ func (r GoJsonRender) Render(w http.ResponseWriter) error {
		header["Content-Type"] = []string{"application/json; charset=utf-8"}
	}

	jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent, r.Filter)
	jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent)
	if err != nil {
		panic(err)
	}
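This hunk threads the optional jsonfilter through GoJsonRender into MarshalSafeCollections. A hedged sketch of the resulting behaviour, based on the struct-tag handling and tests shown in this diff; the struct and filter names are illustrative:

```go
package main

import (
	"fmt"

	json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
)

type apiUser struct {
	Name  string `json:"name"`
	Email string `json:"email" jsonfilter:"INTERNAL"`
}

func main() {
	u := apiUser{Name: "maria", Email: "maria@example.com"}

	filter := "PUBLIC"
	// Fields carrying a jsonfilter tag are only emitted when the active filter
	// matches one of the tag values, so "email" is dropped here; untagged
	// fields are always emitted.
	out, err := json.MarshalSafeCollections(u, true, true, nil, &filter)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"name":"maria"}
}
```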
@@ -116,18 +116,3 @@ func TestNumberIsValid(t *testing.T) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkNumberIsValid(b *testing.B) {
|
||||
s := "-61657.61667E+61673"
|
||||
for i := 0; i < b.N; i++ {
|
||||
isValidNumber(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkNumberIsValidRegexp(b *testing.B) {
|
||||
var jsonNumberRegexp = regexp.MustCompile(`^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$`)
|
||||
s := "-61657.61667E+61673"
|
||||
for i := 0; i < b.N; i++ {
|
||||
jsonNumberRegexp.MatchString(s)
|
||||
}
|
||||
}
|
||||
|
@@ -594,7 +594,7 @@ func (s *scanner) error(c byte, context string) int {
|
||||
return scanError
|
||||
}
|
||||
|
||||
// quoteChar formats c as a quoted character literal
|
||||
// quoteChar formats c as a quoted character literal.
|
||||
func quoteChar(c byte) string {
|
||||
// special cases - different from quoted strings
|
||||
if c == '\'' {
|
||||
|
@@ -179,9 +179,11 @@ func nonSpace(b []byte) bool {
|
||||
|
||||
// An Encoder writes JSON values to an output stream.
|
||||
type Encoder struct {
|
||||
w io.Writer
|
||||
err error
|
||||
escapeHTML bool
|
||||
w io.Writer
|
||||
err error
|
||||
escapeHTML bool
|
||||
nilSafeSlices bool
|
||||
nilSafeMaps bool
|
||||
|
||||
indentBuf *bytes.Buffer
|
||||
indentPrefix string
|
||||
@@ -202,8 +204,11 @@ func (enc *Encoder) Encode(v any) error {
|
||||
if enc.err != nil {
|
||||
return enc.err
|
||||
}
|
||||
|
||||
e := newEncodeState()
|
||||
err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML})
|
||||
defer encodeStatePool.Put(e)
|
||||
|
||||
err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML, nilSafeMaps: enc.nilSafeMaps, nilSafeSlices: enc.nilSafeSlices})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -231,7 +236,6 @@ func (enc *Encoder) Encode(v any) error {
|
||||
if _, err = enc.w.Write(b); err != nil {
|
||||
enc.err = err
|
||||
}
|
||||
encodeStatePool.Put(e)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -243,6 +247,13 @@ func (enc *Encoder) SetIndent(prefix, indent string) {
|
||||
enc.indentValue = indent
|
||||
}
|
||||
|
||||
// SetNilSafeCollection specifies whether to represent nil slices and maps as
|
||||
// '[]' or '{}' respectfully (flag on) instead of 'null' (default) when marshaling json.
|
||||
func (enc *Encoder) SetNilSafeCollection(nilSafeSlices bool, nilSafeMaps bool) {
|
||||
enc.nilSafeSlices = nilSafeSlices
|
||||
enc.nilSafeMaps = nilSafeMaps
|
||||
}
|
||||
|
||||
// SetEscapeHTML specifies whether problematic HTML characters
|
||||
// should be escaped inside JSON quoted strings.
|
||||
// The default behavior is to escape &, <, and > to \u0026, \u003c, and \u003e
|
||||
|
@@ -12,6 +12,7 @@ import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"reflect"
|
||||
"runtime/debug"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
@@ -41,7 +42,7 @@ false
|
||||
|
||||
func TestEncoder(t *testing.T) {
|
||||
for i := 0; i <= len(streamTest); i++ {
|
||||
var buf bytes.Buffer
|
||||
var buf strings.Builder
|
||||
enc := NewEncoder(&buf)
|
||||
// Check that enc.SetIndent("", "") turns off indentation.
|
||||
enc.SetIndent(">", ".")
|
||||
@@ -59,6 +60,43 @@ func TestEncoder(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestEncoderErrorAndReuseEncodeState(t *testing.T) {
|
||||
// Disable the GC temporarily to prevent encodeState's in Pool being cleaned away during the test.
|
||||
percent := debug.SetGCPercent(-1)
|
||||
defer debug.SetGCPercent(percent)
|
||||
|
||||
// Trigger an error in Marshal with cyclic data.
|
||||
type Dummy struct {
|
||||
Name string
|
||||
Next *Dummy
|
||||
}
|
||||
dummy := Dummy{Name: "Dummy"}
|
||||
dummy.Next = &dummy
|
||||
|
||||
var buf bytes.Buffer
|
||||
enc := NewEncoder(&buf)
|
||||
if err := enc.Encode(dummy); err == nil {
|
||||
t.Errorf("Encode(dummy) == nil; want error")
|
||||
}
|
||||
|
||||
type Data struct {
|
||||
A string
|
||||
I int
|
||||
}
|
||||
data := Data{A: "a", I: 1}
|
||||
if err := enc.Encode(data); err != nil {
|
||||
t.Errorf("Marshal(%v) = %v", data, err)
|
||||
}
|
||||
|
||||
var data2 Data
|
||||
if err := Unmarshal(buf.Bytes(), &data2); err != nil {
|
||||
t.Errorf("Unmarshal(%v) = %v", data2, err)
|
||||
}
|
||||
if data2 != data {
|
||||
t.Errorf("expect: %v, but get: %v", data, data2)
|
||||
}
|
||||
}
|
||||
|
||||
var streamEncodedIndent = `0.1
|
||||
"hello"
|
||||
null
|
||||
@@ -77,7 +115,7 @@ false
|
||||
`
|
||||
|
||||
func TestEncoderIndent(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
var buf strings.Builder
|
||||
enc := NewEncoder(&buf)
|
||||
enc.SetIndent(">", ".")
|
||||
for _, v := range streamTest {
|
||||
@@ -147,7 +185,7 @@ func TestEncoderSetEscapeHTML(t *testing.T) {
|
||||
`{"bar":"\"<html>foobar</html>\""}`,
|
||||
},
|
||||
} {
|
||||
var buf bytes.Buffer
|
||||
var buf strings.Builder
|
||||
enc := NewEncoder(&buf)
|
||||
if err := enc.Encode(tt.v); err != nil {
|
||||
t.Errorf("Encode(%s): %s", tt.name, err)
|
||||
@@ -309,21 +347,6 @@ func TestBlocking(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkEncoderEncode(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
type T struct {
|
||||
X, Y string
|
||||
}
|
||||
v := &T{"foo", "bar"}
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
if err := NewEncoder(io.Discard).Encode(v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
type tokenStreamCase struct {
|
||||
json string
|
||||
expTokens []any
|
||||
@@ -472,3 +495,45 @@ func TestHTTPDecoding(t *testing.T) {
|
||||
t.Errorf("err = %v; want io.EOF", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEncoderSetNilSafeCollection(t *testing.T) {
|
||||
var (
|
||||
nilSlice []interface{}
|
||||
pNilSlice *[]interface{}
|
||||
nilMap map[string]interface{}
|
||||
pNilMap *map[string]interface{}
|
||||
)
|
||||
for _, tt := range []struct {
|
||||
name string
|
||||
v interface{}
|
||||
want string
|
||||
rescuedWant string
|
||||
}{
|
||||
{"nilSlice", nilSlice, "null", "[]"},
|
||||
{"nonNilSlice", []interface{}{}, "[]", "[]"},
|
||||
{"sliceWithValues", []interface{}{1, 2, 3}, "[1,2,3]", "[1,2,3]"},
|
||||
{"pNilSlice", pNilSlice, "null", "null"},
|
||||
{"nilMap", nilMap, "null", "{}"},
|
||||
{"nonNilMap", map[string]interface{}{}, "{}", "{}"},
|
||||
{"mapWithValues", map[string]interface{}{"1": 1, "2": 2, "3": 3}, "{\"1\":1,\"2\":2,\"3\":3}", "{\"1\":1,\"2\":2,\"3\":3}"},
|
||||
{"pNilMap", pNilMap, "null", "null"},
|
||||
} {
|
||||
var buf bytes.Buffer
|
||||
enc := NewEncoder(&buf)
|
||||
if err := enc.Encode(tt.v); err != nil {
|
||||
t.Fatalf("Encode(%s): %s", tt.name, err)
|
||||
}
|
||||
if got := strings.TrimSpace(buf.String()); got != tt.want {
|
||||
t.Errorf("Encode(%s) = %#q, want %#q", tt.name, got, tt.want)
|
||||
}
|
||||
buf.Reset()
|
||||
enc.SetNilSafeCollection(true, true)
|
||||
if err := enc.Encode(tt.v); err != nil {
|
||||
t.Fatalf("SetNilSafeCollection(true) Encode(%s): %s", tt.name, err)
|
||||
}
|
||||
if got := strings.TrimSpace(buf.String()); got != tt.rescuedWant {
|
||||
t.Errorf("SetNilSafeCollection(true) Encode(%s) = %#q, want %#q",
|
||||
tt.name, got, tt.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,54 +0,0 @@

Google OAuth Setup (to send mails)
==================================


- Log in at https://console.cloud.google.com

- Activate the GMail API: https://console.cloud.google.com/apis/library/gmail.googleapis.com?

- Create a new project (e.g. 'BackendMailAPI') at https://console.cloud.google.com/projectcreate
  User type: Internal
  Application name: 'BackendMailAPI'
  Support email: ...
  Authorized domains: 'heydyno.de' (or project domain)
  Contact email: ...


- Under "Credentials", create a new OAuth client at https://console.cloud.google.com/apis/credentials
  Application type: Web
  Name: 'BackendMailOAuth'
  Redirect URI: 'http://localhost/oauth'
  Note down the client ID and client secret

- Open in a browser:
  https://accounts.google.com/o/oauth2/v2/auth?redirect_uri=http://localhost/oauth&prompt=consent&response_type=code&client_id={...}&scope=https://www.googleapis.com/auth/gmail.send&access_type=offline
  Note down the code from the redirected URI

- Redeem the code via this request (and note down the refresh_token):

  ```
  curl --request POST \
    --url https://oauth2.googleapis.com/token \
    --data code={...} \
    --data redirect_uri=http://localhost/oauth \
    --data client_id={...} \
    --data client_secret={...} \
    --data grant_type=authorization_code \
    --data scope=https://www.googleapis.com/auth/gmail.send
  ```

- Done: with `client_id`, `client_secret` and `refresh_token` the package can be used (see the usage sketch below)
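A minimal, hedged usage sketch for the setup above, based on the constructors and the `SendMail` signature shown further down in this diff; the import path and the placeholder addresses/credentials are assumptions:

```go
package main

import (
	"context"
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/googleapi" // assumed import path
)

func main() {
	// client_id, client_secret and refresh_token come from the OAuth setup described above.
	auth := googleapi.NewGoogleOAuth("client_id", "client_secret", "refresh_token")
	gclient := googleapi.NewGoogleClient(auth)

	mail, err := gclient.SendMail(
		context.Background(),
		"noreply@example.com",           // from
		[]string{"someone@example.com"}, // recipients
		nil,                             // cc
		nil,                             // bcc
		"Hello Test Mail",
		googleapi.MailBody{Plain: "Plain Text"},
		nil, // attachments
	)
	if err != nil {
		panic(err)
	}
	fmt.Printf("mail.ID = %s, mail.ThreadID = %s\n", mail.ID, mail.ThreadID)
}
```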
@@ -1,46 +0,0 @@
|
||||
package googleapi
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type MailAttachment struct {
|
||||
IsInline bool
|
||||
ContentType string
|
||||
Filename string
|
||||
Data []byte
|
||||
}
|
||||
|
||||
func (a MailAttachment) dump() []string {
|
||||
res := make([]string, 0, 4)
|
||||
|
||||
if a.ContentType != "" {
|
||||
res = append(res, "Content-Type: "+a.ContentType+"; charset=UTF-8")
|
||||
}
|
||||
|
||||
res = append(res, "Content-Transfer-Encoding: base64")
|
||||
|
||||
if a.IsInline {
|
||||
if a.Filename != "" {
|
||||
res = append(res, fmt.Sprintf("Content-Disposition: inline;filename=\"%s\"", a.Filename))
|
||||
} else {
|
||||
res = append(res, "Content-Disposition: inline")
|
||||
}
|
||||
} else {
|
||||
if a.Filename != "" {
|
||||
res = append(res, fmt.Sprintf("Content-Disposition: attachment;filename=\"%s\"", a.Filename))
|
||||
} else {
|
||||
res = append(res, "Content-Disposition: attachment")
|
||||
}
|
||||
}
|
||||
|
||||
b64 := base64.StdEncoding.EncodeToString(a.Data)
|
||||
for i := 0; i < len(b64); i += 80 {
|
||||
res = append(res, b64[i:min(i+80, len(b64))])
|
||||
}
|
||||
|
||||
res = append(res)
|
||||
|
||||
return res
|
||||
}
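For illustration, a hedged sketch (written as an in-package test, since `dump()` is unexported) of the lines this method produces for a small inline attachment; the attachment data and filename are made up:

```go
package googleapi

import (
	"fmt"
	"strings"
	"testing"
)

// Illustrative sketch only, not part of the repository.
func TestDumpAttachmentSketch(t *testing.T) {
	att := MailAttachment{
		IsInline:    true,
		ContentType: "image/png",
		Filename:    "img.png",
		Data:        []byte("tiny-fake-png"),
	}

	// dump() yields, in order:
	//   Content-Type: image/png; charset=UTF-8
	//   Content-Transfer-Encoding: base64
	//   Content-Disposition: inline;filename="img.png"
	//   dGlueS1mYWtlLXBuZw==   (base64 of Data, wrapped every 80 characters)
	fmt.Println(strings.Join(att.dump(), "\r\n"))
}
```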
|
@@ -1,6 +0,0 @@
|
||||
package googleapi
|
||||
|
||||
type MailBody struct {
|
||||
Plain string
|
||||
HTML string
|
||||
}
|
@@ -1,224 +0,0 @@
|
||||
package googleapi
|
||||
|
||||
import (
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"mime"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// https://datatracker.ietf.org/doc/html/rfc2822
|
||||
func encodeMimeMail(from string, recipients []string, cc []string, bcc []string, subject string, body MailBody, attachments []MailAttachment) string {
|
||||
|
||||
data := make([]string, 0, 32)
|
||||
|
||||
data = append(data, "Date: "+time.Now().Format(time.RFC1123Z))
|
||||
data = append(data, "MIME-Version: 1.0")
|
||||
data = append(data, "From: "+mime.QEncoding.Encode("UTF-8", from))
|
||||
data = append(data, "To: "+strings.Join(langext.ArrMap(recipients, func(v string) string { return mime.QEncoding.Encode("UTF-8", v) }), ", "))
|
||||
if len(cc) > 0 {
|
||||
data = append(data, "To: "+strings.Join(langext.ArrMap(cc, func(v string) string { return mime.QEncoding.Encode("UTF-8", v) }), ", "))
|
||||
}
|
||||
if len(bcc) > 0 {
|
||||
data = append(data, "Bcc: "+strings.Join(langext.ArrMap(bcc, func(v string) string { return mime.QEncoding.Encode("UTF-8", v) }), ", "))
|
||||
}
|
||||
data = append(data, "Subject: "+mime.QEncoding.Encode("UTF-8", subject))
|
||||
|
||||
hasInlineAttachments := langext.ArrAny(attachments, func(v MailAttachment) bool { return v.IsInline })
|
||||
hasNormalAttachments := langext.ArrAny(attachments, func(v MailAttachment) bool { return !v.IsInline })
|
||||
hasPlain := body.Plain != ""
|
||||
hasHTML := body.HTML != ""
|
||||
|
||||
mixedBoundary := langext.MustRawHexUUID()
|
||||
relatedBoundary := langext.MustRawHexUUID()
|
||||
altBoundary := langext.MustRawHexUUID()
|
||||
|
||||
inlineAttachments := langext.ArrFilter(attachments, func(v MailAttachment) bool { return v.IsInline })
|
||||
normalAttachments := langext.ArrFilter(attachments, func(v MailAttachment) bool { return !v.IsInline })
|
||||
|
||||
if hasInlineAttachments && hasNormalAttachments {
|
||||
// "mixed+related"
|
||||
|
||||
data = append(data, "Content-Type: multipart/mixed; boundary="+mixedBoundary)
|
||||
data = append(data, "")
|
||||
data = append(data, "--"+mixedBoundary)
|
||||
|
||||
data = append(data, "Content-Type: multipart/related; boundary="+relatedBoundary)
|
||||
data = append(data, "")
|
||||
|
||||
data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, relatedBoundary, altBoundary)...)
|
||||
data = append(data, "")
|
||||
|
||||
for i, attachment := range inlineAttachments {
|
||||
data = append(data, "--"+relatedBoundary)
|
||||
data = append(data, attachment.dump()...)
|
||||
|
||||
if i < len(inlineAttachments)-1 {
|
||||
data = append(data, "")
|
||||
}
|
||||
}
|
||||
|
||||
data = append(data, "--"+relatedBoundary+"--")
|
||||
|
||||
for i, attachment := range normalAttachments {
|
||||
data = append(data, "--"+mixedBoundary)
|
||||
data = append(data, attachment.dump()...)
|
||||
|
||||
if i < len(normalAttachments)-1 {
|
||||
data = append(data, "")
|
||||
}
|
||||
}
|
||||
|
||||
data = append(data, "--"+mixedBoundary+"--")
|
||||
|
||||
} else if hasNormalAttachments {
|
||||
// "mixed"
|
||||
|
||||
data = append(data, "Content-Type: multipart/mixed; boundary="+mixedBoundary)
|
||||
data = append(data, "")
|
||||
|
||||
data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, mixedBoundary, altBoundary)...)
|
||||
if hasPlain && hasHTML {
|
||||
data = append(data, "")
|
||||
}
|
||||
|
||||
for i, attachment := range normalAttachments {
|
||||
data = append(data, "--"+mixedBoundary)
|
||||
data = append(data, attachment.dump()...)
|
||||
|
||||
if i < len(normalAttachments)-1 {
|
||||
data = append(data, "")
|
||||
}
|
||||
}
|
||||
|
||||
data = append(data, "--"+mixedBoundary+"--")
|
||||
|
||||
} else if hasInlineAttachments {
|
||||
// "related"
|
||||
|
||||
data = append(data, "Content-Type: multipart/related; boundary="+relatedBoundary)
|
||||
data = append(data, "")
|
||||
|
||||
data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, relatedBoundary, altBoundary)...)
|
||||
data = append(data, "")
|
||||
|
||||
for i, attachment := range inlineAttachments {
|
||||
data = append(data, "--"+relatedBoundary)
|
||||
data = append(data, attachment.dump()...)
|
||||
|
||||
if i < len(inlineAttachments)-1 {
|
||||
data = append(data, "")
|
||||
}
|
||||
}
|
||||
|
||||
data = append(data, "--"+relatedBoundary+"--")
|
||||
|
||||
} else if hasPlain && hasHTML {
|
||||
// "alternative"
|
||||
|
||||
data = append(data, "Content-Type: multipart/alternative; boundary="+altBoundary)
|
||||
data = append(data, "")
|
||||
|
||||
data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, altBoundary, altBoundary)...)
|
||||
data = append(data, "")
|
||||
|
||||
data = append(data, "--"+altBoundary+"--")
|
||||
|
||||
} else if hasPlain {
|
||||
// "plain"
|
||||
|
||||
data = append(data, "Content-Type: text/plain; charset=UTF-8")
|
||||
data = append(data, "Content-Transfer-Encoding: 7bit")
|
||||
data = append(data, "")
|
||||
data = append(data, body.Plain)
|
||||
|
||||
} else if hasHTML {
|
||||
// "plain"
|
||||
|
||||
data = append(data, "Content-Type: text/html; charset=UTF-8")
|
||||
data = append(data, "Content-Transfer-Encoding: 7bit")
|
||||
data = append(data, "")
|
||||
data = append(data, body.HTML)
|
||||
|
||||
} else {
|
||||
// "empty??"
|
||||
|
||||
}
|
||||
|
||||
return strings.Join(data, "\r\n")
|
||||
}
|
||||
|
||||
func dumpMailBody(body MailBody, hasInlineAttachments bool, hasNormalAttachments bool, boundary string, boundaryAlt string) []string {
|
||||
|
||||
if body.HTML != "" && body.Plain != "" && !hasInlineAttachments && hasNormalAttachments {
|
||||
data := make([]string, 0, 16)
|
||||
data = append(data, "--"+boundary)
|
||||
data = append(data, "Content-Type: multipart/alternative; boundary="+boundaryAlt)
|
||||
data = append(data, "")
|
||||
data = append(data, "--"+boundaryAlt)
|
||||
data = append(data, "Content-Type: text/plain; charset=UTF-8")
|
||||
data = append(data, "Content-Transfer-Encoding: 7bit")
|
||||
data = append(data, "")
|
||||
data = append(data, body.Plain)
|
||||
data = append(data, "")
|
||||
data = append(data, "--"+boundaryAlt)
|
||||
data = append(data, "Content-Type: text/html; charset=UTF-8")
|
||||
data = append(data, "Content-Transfer-Encoding: 7bit")
|
||||
data = append(data, "")
|
||||
data = append(data, body.HTML)
|
||||
data = append(data, "")
|
||||
data = append(data, "--"+boundaryAlt+"--")
|
||||
return data
|
||||
}
|
||||
|
||||
if body.HTML != "" && body.Plain != "" && hasInlineAttachments {
|
||||
data := make([]string, 0, 2)
|
||||
data = append(data, "--"+boundary)
|
||||
data = append(data, body.HTML)
|
||||
return data
|
||||
}
|
||||
|
||||
if body.HTML != "" && body.Plain != "" {
|
||||
data := make([]string, 0, 8)
|
||||
data = append(data, "--"+boundary)
|
||||
data = append(data, "Content-Type: text/plain; charset=UTF-8")
|
||||
data = append(data, "Content-Transfer-Encoding: 7bit")
|
||||
data = append(data, "")
|
||||
data = append(data, body.Plain)
|
||||
data = append(data, "")
|
||||
data = append(data, "--"+boundary)
|
||||
data = append(data, "Content-Type: text/html; charset=UTF-8")
|
||||
data = append(data, "Content-Transfer-Encoding: 7bit")
|
||||
data = append(data, "")
|
||||
data = append(data, body.HTML)
|
||||
return data
|
||||
}
|
||||
|
||||
if body.HTML != "" {
|
||||
data := make([]string, 0, 2)
|
||||
data = append(data, "--"+boundary)
|
||||
data = append(data, "Content-Type: text/html; charset=UTF-8")
|
||||
data = append(data, "Content-Transfer-Encoding: 7bit")
|
||||
data = append(data, "")
|
||||
data = append(data, body.HTML)
|
||||
return data
|
||||
}
|
||||
|
||||
if body.Plain != "" {
|
||||
data := make([]string, 0, 2)
|
||||
data = append(data, "--"+boundary)
|
||||
data = append(data, "Content-Type: text/plain; charset=UTF-8")
|
||||
data = append(data, "Content-Transfer-Encoding: 7bit")
|
||||
data = append(data, "")
|
||||
data = append(data, body.Plain)
|
||||
return data
|
||||
}
|
||||
|
||||
data := make([]string, 0, 16)
|
||||
data = append(data, "--"+boundary)
|
||||
data = append(data, "Content-Type: text/plain; charset=UTF-8")
|
||||
data = append(data, "Content-Transfer-Encoding: 7bit")
|
||||
data = append(data, "")
|
||||
data = append(data, "") // no content ?!?
|
||||
return data
|
||||
}
|
@@ -1,77 +0,0 @@
|
||||
package googleapi
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/tst"
|
||||
"os"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestEncodeMimeMail(t *testing.T) {
|
||||
|
||||
mail := encodeMimeMail(
|
||||
"noreply@heydyno.de",
|
||||
[]string{"trash@mikescher.de"},
|
||||
nil,
|
||||
nil,
|
||||
"Hello Test Mail",
|
||||
MailBody{Plain: "Plain Text"},
|
||||
nil)
|
||||
|
||||
fmt.Printf("%s\n\n", mail)
|
||||
}
|
||||
|
||||
func TestEncodeMimeMail2(t *testing.T) {
|
||||
|
||||
mail := encodeMimeMail(
|
||||
"noreply@heydyno.de",
|
||||
[]string{"trash@mikescher.de"},
|
||||
nil,
|
||||
nil,
|
||||
"Hello Test Mail (alternative)",
|
||||
MailBody{
|
||||
Plain: "Plain Text",
|
||||
HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
|
||||
},
|
||||
nil)
|
||||
|
||||
fmt.Printf("%s\n\n", mail)
|
||||
}
|
||||
|
||||
func TestEncodeMimeMail3(t *testing.T) {
|
||||
|
||||
mail := encodeMimeMail(
|
||||
"noreply@heydyno.de",
|
||||
[]string{"trash@mikescher.de"},
|
||||
nil,
|
||||
nil,
|
||||
"Hello Test Mail (alternative)",
|
||||
MailBody{
|
||||
HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
|
||||
},
|
||||
[]MailAttachment{
|
||||
{Data: []byte("HelloWorld"), Filename: "test.txt", IsInline: false, ContentType: "text/plain"},
|
||||
})
|
||||
|
||||
fmt.Printf("%s\n\n", mail)
|
||||
}
|
||||
|
||||
func TestEncodeMimeMail4(t *testing.T) {
|
||||
|
||||
b := tst.Must(os.ReadFile("/home/mike/Pictures/Screenshot_20220706_190205.png"))(t)
|
||||
|
||||
mail := encodeMimeMail(
|
||||
"noreply@heydyno.de",
|
||||
[]string{"trash@mikescher.de"},
|
||||
nil,
|
||||
nil,
|
||||
"Hello Test Mail (inline)",
|
||||
MailBody{
|
||||
HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
|
||||
},
|
||||
[]MailAttachment{
|
||||
{Data: b, Filename: "img.png", IsInline: true, ContentType: "image/png"},
|
||||
})
|
||||
|
||||
fmt.Printf("%s\n\n", mail)
|
||||
}
|
@@ -1,91 +0,0 @@
|
||||
package googleapi
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/timeext"
|
||||
"io"
|
||||
"net/http"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
type GoogleOAuth interface {
|
||||
AccessToken() (string, error)
|
||||
}
|
||||
|
||||
type oauth struct {
|
||||
clientID string
|
||||
clientSecret string
|
||||
refreshToken string
|
||||
|
||||
lock sync.RWMutex
|
||||
accessToken *string
|
||||
expiryDate *time.Time
|
||||
}
|
||||
|
||||
func NewGoogleOAuth(clientid string, clientsecret, refreshtoken string) GoogleOAuth {
|
||||
return &oauth{
|
||||
clientID: clientid,
|
||||
clientSecret: clientsecret,
|
||||
refreshToken: refreshtoken,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *oauth) AccessToken() (string, error) {
|
||||
c.lock.RLock()
|
||||
if c.accessToken != nil && c.expiryDate != nil && (*c.expiryDate).After(time.Now()) {
|
||||
c.lock.RUnlock()
|
||||
return *c.accessToken, nil // still valid
|
||||
}
|
||||
c.lock.RUnlock()
|
||||
|
||||
httpclient := http.Client{}
|
||||
|
||||
url := fmt.Sprintf("https://oauth2.googleapis.com/token?client_id=%s&client_secret=%s&grant_type=%s&refresh_token=%s",
|
||||
c.clientID,
|
||||
c.clientSecret,
|
||||
"refresh_token",
|
||||
c.refreshToken)
|
||||
|
||||
req, err := http.NewRequest(http.MethodPost, url, nil)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
reqStartTime := time.Now()
|
||||
|
||||
res, err := httpclient.Do(req)
|
||||
|
||||
type response struct {
|
||||
AccessToken string `json:"access_token"`
|
||||
ExpiresIn int `json:"expires_in"`
|
||||
Scope string `json:"scope"`
|
||||
TokenType string `json:"token_type"`
|
||||
}
|
||||
|
||||
var r response
|
||||
|
||||
data, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
err = json.Unmarshal(data, &r)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if r.ExpiresIn == 0 || r.AccessToken == "" {
|
||||
return "", exerr.New(exerr.TypeGoogleResponse, "google oauth returned no response").Str("body", string(data)).Build()
|
||||
}
|
||||
|
||||
c.lock.Lock()
|
||||
c.expiryDate = langext.Ptr(reqStartTime.Add(timeext.FromSeconds(r.ExpiresIn - 10)))
|
||||
c.accessToken = langext.Ptr(r.AccessToken)
|
||||
c.lock.Unlock()
|
||||
|
||||
return r.AccessToken, nil
|
||||
}
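A short hedged usage sketch of the token helper above; the import path is an assumption, and the access token is cached internally until roughly ten seconds before it expires:

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/googleapi" // assumed import path
)

func main() {
	auth := googleapi.NewGoogleOAuth("client_id", "client_secret", "refresh_token")

	// The first call hits https://oauth2.googleapis.com/token; later calls return the
	// cached token until shortly before its expiry date.
	tok, err := auth.AccessToken()
	if err != nil {
		panic(err)
	}
	fmt.Println("access token:", tok)
}
```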
|
@@ -1,69 +0,0 @@
|
||||
package googleapi
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"io"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
type MailRef struct {
|
||||
ID string `json:"id"`
|
||||
ThreadID string `json:"threadId"`
|
||||
LabelIDs []string `json:"labelIds"`
|
||||
}
|
||||
|
||||
func (c *client) SendMail(ctx context.Context, from string, recipients []string, cc []string, bcc []string, subject string, body MailBody, attachments []MailAttachment) (MailRef, error) {
|
||||
|
||||
mm := encodeMimeMail(from, recipients, cc, bcc, subject, body, attachments)
|
||||
|
||||
tok, err := c.oauth.AccessToken()
|
||||
if err != nil {
|
||||
return MailRef{}, exerr.Wrap(err, "").Build()
|
||||
}
|
||||
|
||||
url := fmt.Sprintf("https://gmail.googleapis.com/gmail/v1/users/%s/messages/send?alt=json&prettyPrint=false", "me")
|
||||
|
||||
msgbody, err := json.Marshal(langext.H{"raw": base64.URLEncoding.EncodeToString([]byte(mm))})
|
||||
if err != nil {
|
||||
return MailRef{}, exerr.Wrap(err, "").Build()
|
||||
}
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(msgbody))
|
||||
if err != nil {
|
||||
return MailRef{}, exerr.Wrap(err, "").Build()
|
||||
}
|
||||
|
||||
req.Header.Add("Authorization", "Bearer "+tok)
|
||||
req.Header.Add("X-Goog-Api-Client", "blackforestbytes-goext/"+goext.GoextVersion)
|
||||
req.Header.Add("User-Agent", "blackforestbytes-goext/"+goext.GoextVersion)
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
|
||||
resp, err := c.http.Do(req)
|
||||
if err != nil {
|
||||
return MailRef{}, exerr.Wrap(err, "").Build()
|
||||
}
|
||||
|
||||
respBody, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return MailRef{}, exerr.Wrap(err, "").Build()
|
||||
}
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return MailRef{}, exerr.New(exerr.TypeGoogleStatuscode, "gmail returned non-200 statuscode").Int("sc", resp.StatusCode).Str("body", string(respBody)).Build()
|
||||
}
|
||||
|
||||
var respObj MailRef
|
||||
err = json.Unmarshal(respBody, &respObj)
|
||||
if err != nil {
|
||||
return MailRef{}, exerr.Wrap(err, "").Str("body", string(respBody)).Build()
|
||||
}
|
||||
|
||||
return respObj, nil
|
||||
}
|
@@ -1,151 +0,0 @@
|
||||
package googleapi
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/tst"
|
||||
"os"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
if !exerr.Initialized() {
|
||||
exerr.Init(exerr.ErrorPackageConfigInit{ZeroLogErrTraces: langext.PFalse, ZeroLogAllTraces: langext.PFalse})
|
||||
}
|
||||
os.Exit(m.Run())
|
||||
}
|
||||
|
||||
func TestSendMail1(t *testing.T) {
|
||||
t.Skip()
|
||||
return
|
||||
|
||||
auth := NewGoogleOAuth(
|
||||
"554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com",
|
||||
"TODO",
|
||||
"TODO")
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
gclient := NewGoogleClient(auth)
|
||||
|
||||
mail, err := gclient.SendMail(
|
||||
ctx,
|
||||
"noreply@heydyno.de",
|
||||
[]string{"trash@mikescher.de"},
|
||||
nil,
|
||||
nil,
|
||||
"Hello Test Mail",
|
||||
MailBody{Plain: "Plain Text"},
|
||||
nil)
|
||||
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
fmt.Printf("mail.ID := %s\n", mail.ID)
|
||||
fmt.Printf("mail.ThreadID := %s\n", mail.ThreadID)
|
||||
fmt.Printf("mail.LabelIDs := %v\n", mail.LabelIDs)
|
||||
}
|
||||
|
||||
func TestSendMail2(t *testing.T) {
|
||||
t.Skip()
|
||||
return
|
||||
|
||||
auth := NewGoogleOAuth(
|
||||
"554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com",
|
||||
"TODO",
|
||||
"TODO")
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
gclient := NewGoogleClient(auth)
|
||||
|
||||
mail, err := gclient.SendMail(
|
||||
ctx,
|
||||
"noreply@heydyno.de",
|
||||
[]string{"trash@mikescher.de"},
|
||||
nil,
|
||||
nil,
|
||||
"Hello Test Mail (alternative)",
|
||||
MailBody{
|
||||
Plain: "Plain Text",
|
||||
HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
|
||||
},
|
||||
nil)
|
||||
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
fmt.Printf("mail.ID := %s\n", mail.ID)
|
||||
fmt.Printf("mail.ThreadID := %s\n", mail.ThreadID)
|
||||
fmt.Printf("mail.LabelIDs := %v\n", mail.LabelIDs)
|
||||
}
|
||||
|
||||
func TestSendMail3(t *testing.T) {
|
||||
t.Skip()
|
||||
return
|
||||
|
||||
auth := NewGoogleOAuth(
|
||||
"554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com",
|
||||
"TODO",
|
||||
"TODO")
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
gclient := NewGoogleClient(auth)
|
||||
|
||||
mail, err := gclient.SendMail(
|
||||
ctx,
|
||||
"noreply@heydyno.de",
|
||||
[]string{"trash@mikescher.de"},
|
||||
nil,
|
||||
nil,
|
||||
"Hello Test Mail (attach)",
|
||||
MailBody{
|
||||
HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
|
||||
},
|
||||
[]MailAttachment{
|
||||
{Data: []byte("HelloWorld"), Filename: "test.txt", IsInline: false, ContentType: "text/plain"},
|
||||
})
|
||||
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
fmt.Printf("mail.ID := %s\n", mail.ID)
|
||||
fmt.Printf("mail.ThreadID := %s\n", mail.ThreadID)
|
||||
fmt.Printf("mail.LabelIDs := %v\n", mail.LabelIDs)
|
||||
}
|
||||
|
||||
func TestSendMail4(t *testing.T) {
|
||||
t.Skip()
|
||||
return
|
||||
|
||||
auth := NewGoogleOAuth(
|
||||
"554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com",
|
||||
"TODO",
|
||||
"TODO")
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
gclient := NewGoogleClient(auth)
|
||||
|
||||
b := tst.Must(os.ReadFile("/home/mike/Pictures/Screenshot_20220706_190205.png"))(t)
|
||||
|
||||
mail, err := gclient.SendMail(
|
||||
ctx,
|
||||
"noreply@heydyno.de",
|
||||
[]string{"trash@mikescher.de"},
|
||||
nil,
|
||||
nil,
|
||||
"Hello Test Mail (inline)",
|
||||
MailBody{
|
||||
HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
|
||||
},
|
||||
[]MailAttachment{
|
||||
{Data: b, Filename: "img.png", IsInline: true, ContentType: "image/png"},
|
||||
})
|
||||
|
||||
tst.AssertNoErr(t, err)
|
||||
|
||||
fmt.Printf("mail.ID := %s\n", mail.ID)
|
||||
fmt.Printf("mail.ThreadID := %s\n", mail.ThreadID)
|
||||
fmt.Printf("mail.LabelIDs := %v\n", mail.LabelIDs)
|
||||
}
|
@@ -1,22 +0,0 @@
|
||||
package googleapi
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
type GoogleClient interface {
|
||||
SendMail(ctx context.Context, from string, recipients []string, cc []string, bcc []string, subject string, body MailBody, attachments []MailAttachment) (MailRef, error)
|
||||
}
|
||||
|
||||
type client struct {
|
||||
oauth GoogleOAuth
|
||||
http http.Client
|
||||
}
|
||||
|
||||
func NewGoogleClient(oauth GoogleOAuth) GoogleClient {
|
||||
return &client{
|
||||
oauth: oauth,
|
||||
http: http.Client{},
|
||||
}
|
||||
}
|
@@ -400,7 +400,7 @@ func ArrCastErr[T1 any, T2 any](arr []T1) ([]T2, error) {
|
||||
if vcast, ok := any(v).(T2); ok {
|
||||
r[i] = vcast
|
||||
} else {
|
||||
return nil, errors.New(fmt.Sprintf("Cannot cast element %d of type %T to type %v", i, v, *new(T2)))
|
||||
return nil, errors.New(fmt.Sprintf("Cannot cast element %d of type %T to type %s", i, v, *new(T2)))
|
||||
}
|
||||
}
|
||||
return r, nil
|
||||
@@ -412,7 +412,7 @@ func ArrCastPanic[T1 any, T2 any](arr []T1) []T2 {
|
||||
if vcast, ok := any(v).(T2); ok {
|
||||
r[i] = vcast
|
||||
} else {
|
||||
panic(fmt.Sprintf("Cannot cast element %d of type %T to type %v", i, v, *new(T2)))
|
||||
panic(fmt.Sprintf("Cannot cast element %d of type %T to type %s", i, v, *new(T2)))
|
||||
}
|
||||
}
|
||||
return r
|
||||
@@ -440,42 +440,3 @@ func ArrCopy[T any](in []T) []T {
|
||||
copy(out, in)
|
||||
return out
|
||||
}
|
||||
|
||||
func ArrRemove[T comparable](arr []T, needle T) []T {
|
||||
idx := ArrFirstIndex(arr, needle)
|
||||
if idx >= 0 {
|
||||
return append(arr[:idx], arr[idx+1:]...)
|
||||
}
|
||||
return arr
|
||||
}
|
||||
|
||||
func ArrExcept[T comparable](arr []T, needles ...T) []T {
|
||||
r := make([]T, 0, len(arr))
|
||||
rmlist := ArrToSet(needles)
|
||||
for _, v := range arr {
|
||||
if _, ok := rmlist[v]; !ok {
|
||||
r = append(r, v)
|
||||
}
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
func ArrayToInterface[T any](t []T) []interface{} {
|
||||
res := make([]interface{}, 0, len(t))
|
||||
for i := range t {
|
||||
res = append(res, t[i])
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func JoinString(arr []string, delimiter string) string {
|
||||
str := ""
|
||||
for i, v := range arr {
|
||||
str += v
|
||||
if i < len(arr)-1 {
|
||||
str += delimiter
|
||||
}
|
||||
}
|
||||
|
||||
return str
|
||||
}
|
||||
|
@@ -1,12 +0,0 @@
|
||||
package langext
|
||||
|
||||
import (
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/tst"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestJoinString(t *testing.T) {
|
||||
ids := []string{"1", "2", "3"}
|
||||
res := JoinString(ids, ",")
|
||||
tst.AssertEqual(t, res, "1,2,3")
|
||||
}
|
@@ -1,7 +1,6 @@
|
||||
package langext
|
||||
|
||||
import (
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/tst"
|
||||
"testing"
|
||||
)
|
||||
|
||||
@@ -60,3 +59,9 @@ func TestBase58FlickrDecoding(t *testing.T) {
|
||||
tst.AssertEqual(t, _decStr(t, Base58FlickrEncoding, "9aJCVZR"), "Hello")
|
||||
tst.AssertEqual(t, _decStr(t, Base58FlickrEncoding, "48638rmBiUzG5NKQoX4KcuE5C8paCFACnE28F7qDx13PRtennAmYSSJQ5gJSRihf5ZDyEQS4UimtihR7uARt4wbty2fW9duTQTM9n1DwUBevreyzGwu6W4YSgrvQgCPDxsiE1mCdZsF8VEBpuHHEiJyw"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in.")
|
||||
}
|
||||
|
||||
func tst.AssertEqual(t *testing.T, actual string, expected string) {
|
||||
if actual != expected {
|
||||
t.Errorf("values differ: Actual: '%v', Expected: '%v'", actual, expected)
|
||||
}
|
||||
}
|
||||
|
@@ -1,178 +0,0 @@
|
||||
package langext
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"errors"
|
||||
"math"
|
||||
"math/big"
|
||||
)
|
||||
|
||||
type AnyBaseConverter struct {
|
||||
base uint64
|
||||
charset []rune
|
||||
}
|
||||
|
||||
func NewAnyBaseConverter(cs string) AnyBaseConverter {
|
||||
rcs := []rune(cs)
|
||||
return AnyBaseConverter{
|
||||
base: uint64(len(rcs)),
|
||||
charset: rcs,
|
||||
}
|
||||
}
|
||||
|
||||
func (bc AnyBaseConverter) Rand(rlen int) string {
|
||||
biBase := big.NewInt(int64(bc.base))
|
||||
|
||||
randMax := big.NewInt(math.MaxInt64)
|
||||
|
||||
r := ""
|
||||
|
||||
for i := 0; i < rlen; i++ {
|
||||
v, err := rand.Int(rand.Reader, randMax)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
r += string(bc.charset[v.Mod(v, biBase).Int64()])
|
||||
}
|
||||
|
||||
return r
|
||||
}
|
||||
|
||||
func (bc AnyBaseConverter) EncodeUInt64(num uint64) string {
|
||||
if num == 0 {
|
||||
return "0"
|
||||
}
|
||||
|
||||
b := ""
|
||||
|
||||
// loop as long the num is bigger than zero
|
||||
for num > 0 {
|
||||
r := num % bc.base
|
||||
|
||||
num -= r
|
||||
num /= bc.base
|
||||
|
||||
b += string(bc.charset[int(r)])
|
||||
}
|
||||
|
||||
return b
|
||||
}
|
||||
|
||||
func (bc AnyBaseConverter) DecodeUInt64(str string) (uint64, error) {
|
||||
if str == "" {
|
||||
return 0, errors.New("empty string")
|
||||
}
|
||||
|
||||
result := uint64(0)
|
||||
|
||||
for _, v := range str {
|
||||
result *= bc.base
|
||||
|
||||
pos := ArrFirstIndex(bc.charset, v)
|
||||
if pos == -1 {
|
||||
return 0, errors.New("invalid character: " + string(v))
|
||||
}
|
||||
|
||||
result += uint64(pos)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (bc AnyBaseConverter) Encode(src []byte) string {
|
||||
value := new(big.Int)
|
||||
value.SetBytes(src)
|
||||
return bc.EncodeBigInt(value)
|
||||
}
|
||||
|
||||
func (bc AnyBaseConverter) EncodeBigInt(src *big.Int) string {
|
||||
value := new(big.Int)
|
||||
value.Set(src)
|
||||
|
||||
isneg := value.Sign() < 0
|
||||
|
||||
answer := ""
|
||||
|
||||
if isneg {
|
||||
value.Neg(value)
|
||||
}
|
||||
|
||||
biBase := big.NewInt(int64(bc.base))
|
||||
|
||||
rem := new(big.Int)
|
||||
|
||||
for value.Sign() > 0 {
|
||||
value.QuoRem(value, biBase, rem)
|
||||
answer = string(bc.charset[rem.Int64()]) + answer
|
||||
}
|
||||
|
||||
if isneg {
|
||||
return "-" + answer
|
||||
} else {
|
||||
return answer
|
||||
}
|
||||
}
|
||||
|
||||
func (bc AnyBaseConverter) Decode(src string) ([]byte, error) {
|
||||
value, err := bc.DecodeToBigInt(src)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return value.Bytes(), nil
|
||||
}
|
||||
|
||||
func (bc AnyBaseConverter) DecodeToBigInt(_src string) (*big.Int, error) {
|
||||
result := new(big.Int)
|
||||
result.SetInt64(0)
|
||||
|
||||
src := []rune(_src)
|
||||
|
||||
if len(src) == 0 {
|
||||
return nil, errors.New("string is empty")
|
||||
}
|
||||
if bc.base < 2 {
|
||||
return nil, errors.New("not enough digits")
|
||||
}
|
||||
|
||||
i := 0
|
||||
|
||||
sign := new(big.Int)
|
||||
sign.SetInt64(1)
|
||||
if src[i] == '+' {
|
||||
i++
|
||||
} else if src[i] == '-' {
|
||||
i++
|
||||
sign.SetInt64(-1)
|
||||
}
|
||||
|
||||
if i >= len(src) {
|
||||
return nil, errors.New("no digits in input")
|
||||
}
|
||||
|
||||
biBase := big.NewInt(int64(bc.base))
|
||||
|
||||
oldResult := new(big.Int)
|
||||
|
||||
for ; i < len(src); i++ {
|
||||
n := ArrFirstIndex(bc.charset, src[i])
|
||||
if n < 0 {
|
||||
return nil, errors.New("invalid characters in input")
|
||||
}
|
||||
|
||||
oldResult.Set(result)
|
||||
|
||||
result.Mul(result, biBase)
|
||||
result.Add(result, big.NewInt(int64(n)))
|
||||
|
||||
if result.Cmp(oldResult) < 0 {
|
||||
return nil, errors.New("overflow")
|
||||
}
|
||||
}
|
||||
|
||||
if sign.Cmp(big.NewInt(0)) < 0 {
|
||||
result.Neg(result)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
@@ -1,80 +0,0 @@
|
||||
package langext
|
||||
|
||||
import (
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/tst"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func _anyEncStr(bc AnyBaseConverter, v string) string {
|
||||
vr := bc.Encode([]byte(v))
|
||||
return vr
|
||||
}
|
||||
|
||||
func _anyDecStr(bc AnyBaseConverter, v string) string {
|
||||
vr, err := bc.Decode(v)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return string(vr)
|
||||
}
|
||||
|
||||
func TestAnyBase58DefaultEncoding(t *testing.T) {
|
||||
tst.AssertEqual(t, _anyEncStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "Hello"), "9Ajdvzr")
|
||||
tst.AssertEqual(t, _anyEncStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in."), "48638SMcJuah5okqPx4kCVf5d8QAdgbdNf28g7ReY13prUENNbMyssjq5GjsrJHF5zeZfqs4uJMUJHr7VbrU4XBUZ2Fw9DVtqtn9N1eXucEWSEZahXV6w4ysGSWqGdpeYTJf1MdDzTg8vfcQViifJjZX")
|
||||
}
|
||||
|
||||
func TestAnyBase58DefaultDecoding(t *testing.T) {
|
||||
tst.AssertEqual(t, _anyDecStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "9Ajdvzr"), "Hello")
|
||||
tst.AssertEqual(t, _anyDecStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "48638SMcJuah5okqPx4kCVf5d8QAdgbdNf28g7ReY13prUENNbMyssjq5GjsrJHF5zeZfqs4uJMUJHr7VbrU4XBUZ2Fw9DVtqtn9N1eXucEWSEZahXV6w4ysGSWqGdpeYTJf1MdDzTg8vfcQViifJjZX"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in.")
|
||||
}
|
||||
|
||||
func TestAnyBaseDecode(t *testing.T) {
|
||||
|
||||
const (
|
||||
Binary = "01"
|
||||
Decimal = "0123456789"
|
||||
Hex = "0123456789ABCDEF"
|
||||
DNA = "ACGT"
|
||||
Base32 = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567"
|
||||
Base58 = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
|
||||
Base62 = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
|
||||
Base64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
|
||||
Base256 = "🚀🪐☄🛰🌌🌑🌒🌓🌔🌕🌖🌗🌘🌍🌏🌎🐉☀💻🖥💾💿😂❤😍🤣😊🙏💕😭😘👍😅👏😁🔥🥰💔💖💙😢🤔😆🙄💪😉☺👌🤗💜😔😎😇🌹🤦🎉💞✌✨🤷😱😌🌸🙌😋💗💚😏💛🙂💓🤩😄😀🖤😃💯🙈👇🎶😒🤭❣😜💋👀😪😑💥🙋😞😩😡🤪👊🥳😥🤤👉💃😳✋😚😝😴🌟😬🙃🍀🌷😻😓⭐✅🥺🌈😈🤘💦✔😣🏃💐☹🎊💘😠☝😕🌺🎂🌻😐🖕💝🙊😹🗣💫💀👑🎵🤞😛🔴😤🌼😫⚽🤙☕🏆🤫👈😮🙆🍻🍃🐶💁😲🌿🧡🎁⚡🌞🎈❌✊👋😰🤨😶🤝🚶💰🍓💢🤟🙁🚨💨🤬✈🎀🍺🤓😙💟🌱😖👶🥴▶➡❓💎💸⬇😨🌚🦋😷🕺⚠🙅😟😵👎🤲🤠🤧📌🔵💅🧐🐾🍒😗🤑🌊🤯🐷☎💧😯💆👆🎤🙇🍑❄🌴💣🐸💌📍🥀🤢👅💡💩👐📸👻🤐🤮🎼🥵🚩🍎🍊👼💍📣🥂"
|
||||
)
|
||||
|
||||
type TestDef struct {
|
||||
FromCS string
|
||||
FromVal string
|
||||
ToCS string
|
||||
ToVal string
|
||||
}
|
||||
|
||||
defs := []TestDef{
|
||||
{Binary, "10100101011100000101010", Decimal, "5421098"},
|
||||
{Decimal, "5421098", DNA, "CCAGGTGAAGGG"},
|
||||
{Decimal, "5421098", DNA, "CCAGGTGAAGGG"},
|
||||
{Decimal, "80085", Base256, "🪐💞🔵"},
|
||||
{Hex, "48656C6C6C20576F526C5421", Base64, "SGVsbGwgV29SbFQh"},
|
||||
{Base64, "SGVsbGw/gV29SbF+Qh", Base32, "CIMVWGY3B7QFO32SNRPZBB"},
|
||||
{Base64, "SGVsbGw/gV29SbF+Qh", Base58, "2fUsGKQUcgQcwSqpvy6"},
|
||||
{Base64, "SGVsbGw/gV29SbF+Qh", Base62, "V34nvybdQ3m3RHk9Sr"},
|
||||
}
|
||||
|
||||
for _, def := range defs {
|
||||
|
||||
d1 := NewAnyBaseConverter(def.FromCS)
|
||||
d2 := NewAnyBaseConverter(def.ToCS)
|
||||
|
||||
v1 := tst.Must(d1.Decode(def.FromVal))(t)
|
||||
v2 := tst.Must(d2.Decode(def.ToVal))(t)
|
||||
|
||||
tst.AssertArrayEqual(t, v1, v2)
|
||||
|
||||
str2 := d2.Encode(v1)
|
||||
tst.AssertEqual(t, str2, def.ToVal)
|
||||
|
||||
str1 := d1.Encode(v2)
|
||||
tst.AssertEqual(t, str1, def.FromVal)
|
||||
|
||||
}
|
||||
}
|
@@ -29,14 +29,6 @@ func ArrToMap[T comparable, V any](a []V, keyfunc func(V) T) map[T]V {
|
||||
return result
|
||||
}
|
||||
|
||||
func ArrToSet[T comparable](a []T) map[T]bool {
|
||||
result := make(map[T]bool, len(a))
|
||||
for _, v := range a {
|
||||
result[v] = true
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func MapToArr[T comparable, V any](v map[T]V) []MapEntry[T, V] {
|
||||
result := make([]MapEntry[T, V], 0, len(v))
|
||||
for mk, mv := range v {
|
||||
|
@@ -1,10 +1,7 @@
|
||||
package langext
|
||||
|
||||
import "runtime/debug"
|
||||
|
||||
type PanicWrappedErr struct {
|
||||
panic any
|
||||
Stack string
|
||||
}
|
||||
|
||||
func (p PanicWrappedErr) Error() string {
|
||||
@@ -18,7 +15,7 @@ func (p PanicWrappedErr) ReoveredObj() any {
|
||||
func RunPanicSafe(fn func()) (err error) {
|
||||
defer func() {
|
||||
if rec := recover(); rec != nil {
|
||||
err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
|
||||
err = PanicWrappedErr{panic: rec}
|
||||
}
|
||||
}()
|
||||
|
||||
@@ -30,7 +27,7 @@ func RunPanicSafe(fn func()) (err error) {
|
||||
func RunPanicSafeR1(fn func() error) (err error) {
|
||||
defer func() {
|
||||
if rec := recover(); rec != nil {
|
||||
err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
|
||||
err = PanicWrappedErr{panic: rec}
|
||||
}
|
||||
}()
|
||||
|
||||
@@ -41,7 +38,7 @@ func RunPanicSafeR2[T1 any](fn func() (T1, error)) (r1 T1, err error) {
|
||||
defer func() {
|
||||
if rec := recover(); rec != nil {
|
||||
r1 = *new(T1)
|
||||
err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
|
||||
err = PanicWrappedErr{panic: rec}
|
||||
}
|
||||
}()
|
||||
|
||||
@@ -53,7 +50,7 @@ func RunPanicSafeR3[T1 any, T2 any](fn func() (T1, T2, error)) (r1 T1, r2 T2, er
|
||||
if rec := recover(); rec != nil {
|
||||
r1 = *new(T1)
|
||||
r2 = *new(T2)
|
||||
err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
|
||||
err = PanicWrappedErr{panic: rec}
|
||||
}
|
||||
}()
|
||||
|
||||
@@ -66,7 +63,7 @@ func RunPanicSafeR4[T1 any, T2 any, T3 any](fn func() (T1, T2, T3, error)) (r1 T
|
||||
r1 = *new(T1)
|
||||
r2 = *new(T2)
|
||||
r3 = *new(T3)
|
||||
err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
|
||||
err = PanicWrappedErr{panic: rec}
|
||||
}
|
||||
}()
|
||||
|
||||
|
@@ -35,7 +35,7 @@ func IsNil(i interface{}) bool {
|
||||
return true
|
||||
}
|
||||
switch reflect.TypeOf(i).Kind() {
|
||||
case reflect.Ptr, reflect.Map, reflect.Chan, reflect.Slice, reflect.Func, reflect.UnsafePointer:
|
||||
case reflect.Ptr, reflect.Map, reflect.Array, reflect.Chan, reflect.Slice:
|
||||
return reflect.ValueOf(i).IsNil()
|
||||
}
|
||||
return false
|
||||
|
mongo/.errcheck-excludes (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
(go.mongodb.org/mongo-driver/x/mongo/driver.Connection).Close
|
||||
(*go.mongodb.org/mongo-driver/x/network/connection.connection).Close
|
||||
(go.mongodb.org/mongo-driver/x/network/connection.Connection).Close
|
||||
(*go.mongodb.org/mongo-driver/x/mongo/driver/topology.connection).close
|
||||
(*go.mongodb.org/mongo-driver/x/mongo/driver/topology.Topology).Unsubscribe
|
||||
(*go.mongodb.org/mongo-driver/x/mongo/driver/topology.Server).Close
|
||||
(*go.mongodb.org/mongo-driver/x/network/connection.pool).closeConnection
|
||||
(*go.mongodb.org/mongo-driver/x/mongo/driver/topology.pool).close
|
||||
(go.mongodb.org/mongo-driver/x/network/wiremessage.ReadWriteCloser).Close
|
||||
(*go.mongodb.org/mongo-driver/mongo.Cursor).Close
|
||||
(*go.mongodb.org/mongo-driver/mongo.ChangeStream).Close
|
||||
(*go.mongodb.org/mongo-driver/mongo.Client).Disconnect
|
||||
(net.Conn).Close
|
||||
encoding/pem.Encode
|
||||
fmt.Fprintf
|
||||
fmt.Fprint
|
mongo/.gitignore (new file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
.vscode
|
||||
debug
|
||||
.idea
|
||||
*.iml
|
||||
*.ipr
|
||||
*.iws
|
||||
.idea
|
||||
*.sublime-project
|
||||
*.sublime-workspace
|
||||
driver-test-data.tar.gz
|
||||
perf
|
||||
**mongocryptd.pid
|
||||
*.test
|
mongo/.gitmodules (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
[submodule "specifications"]
|
||||
path = specifications
|
||||
url = git@github.com:mongodb/specifications.git
|
mongo/.golangci.yml (new file, 123 lines)
@@ -0,0 +1,123 @@
|
||||
run:
|
||||
timeout: 5m
|
||||
|
||||
linters:
|
||||
disable-all: true
|
||||
# TODO(GODRIVER-2156): Enable all commented-out linters.
|
||||
enable:
|
||||
- errcheck
|
||||
# - errorlint
|
||||
- gocritic
|
||||
- goimports
|
||||
- gosimple
|
||||
- gosec
|
||||
- govet
|
||||
- ineffassign
|
||||
- makezero
|
||||
- misspell
|
||||
- nakedret
|
||||
- paralleltest
|
||||
- prealloc
|
||||
- revive
|
||||
- staticcheck
|
||||
- typecheck
|
||||
- unused
|
||||
- unconvert
|
||||
- unparam
|
||||
|
||||
linters-settings:
|
||||
errcheck:
|
||||
exclude: .errcheck-excludes
|
||||
gocritic:
|
||||
enabled-checks:
|
||||
# Detects suspicious append result assignments. E.g. "b := append(a, 1, 2, 3)"
|
||||
- appendAssign
|
||||
govet:
|
||||
disable:
|
||||
- cgocall
|
||||
- composites
|
||||
paralleltest:
|
||||
# Ignore missing calls to `t.Parallel()` and only report incorrect uses of `t.Parallel()`.
|
||||
ignore-missing: true
|
||||
staticcheck:
|
||||
checks: [
|
||||
"all",
|
||||
"-SA1019", # Disable deprecation warnings for now.
|
||||
"-SA1012", # Disable "do not pass a nil Context" to allow testing nil contexts in tests.
|
||||
]
|
||||
|
||||
issues:
|
||||
exclude-use-default: false
|
||||
exclude:
|
||||
# Add all default excluded issues except issues related to exported types/functions not having
|
||||
# comments; we want those warnings. The defaults are copied from the "--exclude-use-default"
|
||||
# documentation on https://golangci-lint.run/usage/configuration/#command-line-options
|
||||
## Defaults ##
|
||||
# EXC0001 errcheck: Almost all programs ignore errors on these functions and in most cases it's ok
|
||||
- Error return value of .((os\.)?std(out|err)\..*|.*Close|.*Flush|os\.Remove(All)?|.*print(f|ln)?|os\.(Un)?Setenv). is not checked
|
||||
# EXC0003 golint: False positive when tests are defined in package 'test'
|
||||
- func name will be used as test\.Test.* by other packages, and that stutters; consider calling this
|
||||
# EXC0004 govet: Common false positives
|
||||
- (possible misuse of unsafe.Pointer|should have signature)
|
||||
# EXC0005 staticcheck: Developers tend to write in C-style with an explicit 'break' in a 'switch', so it's ok to ignore
|
||||
- ineffective break statement. Did you mean to break out of the outer loop
|
||||
# EXC0006 gosec: Too many false-positives on 'unsafe' usage
|
||||
- Use of unsafe calls should be audited
|
||||
# EXC0007 gosec: Too many false-positives for parametrized shell calls
|
||||
- Subprocess launch(ed with variable|ing should be audited)
|
||||
# EXC0008 gosec: Duplicated errcheck checks
|
||||
- (G104|G307)
|
||||
# EXC0009 gosec: Too many issues in popular repos
|
||||
- (Expect directory permissions to be 0750 or less|Expect file permissions to be 0600 or less)
|
||||
# EXC0010 gosec: False positive is triggered by 'src, err := ioutil.ReadFile(filename)'
|
||||
- Potential file inclusion via variable
|
||||
## End Defaults ##
|
||||
|
||||
# Ignore capitalization warning for this weird field name.
|
||||
- "var-naming: struct field CqCssWxW should be CqCSSWxW"
|
||||
# Ignore warnings for common "wiremessage.Read..." usage because the safest way to use that API
|
||||
# is by assigning possibly unused returned byte buffers.
|
||||
- "SA4006: this value of `wm` is never used"
|
||||
- "SA4006: this value of `rem` is never used"
|
||||
- "ineffectual assignment to wm"
|
||||
- "ineffectual assignment to rem"
|
||||
|
||||
skip-dirs-use-default: false
|
||||
skip-dirs:
|
||||
- (^|/)vendor($|/)
|
||||
- (^|/)testdata($|/)
|
||||
- (^|/)etc($|/)
|
||||
exclude-rules:
|
||||
# Ignore some linters for example code that is intentionally simplified.
|
||||
- path: examples/
|
||||
linters:
|
||||
- revive
|
||||
- errcheck
|
||||
# Disable unused code linters for the copy/pasted "awsv4" package.
|
||||
- path: x/mongo/driver/auth/internal/awsv4
|
||||
linters:
|
||||
- unused
|
||||
# Disable "unused" linter for code files that depend on the "mongocrypt.MongoCrypt" type because
|
||||
# the linter build doesn't work correctly with CGO enabled. As a result, all calls to a
|
||||
# "mongocrypt.MongoCrypt" API appear to always panic (see mongocrypt_not_enabled.go), leading
|
||||
# to confusing messages about unused code.
|
||||
- path: x/mongo/driver/crypt.go|mongo/(crypt_retrievers|mongocryptd).go
|
||||
linters:
|
||||
- unused
|
||||
# Ignore "TLS MinVersion too low", "TLS InsecureSkipVerify set true", and "Use of weak random
|
||||
# number generator (math/rand instead of crypto/rand)" in tests.
|
||||
- path: _test\.go
|
||||
text: G401|G402|G404
|
||||
linters:
|
||||
- gosec
|
||||
# Ignore missing comments for exported variable/function/type for code in the "internal" and
|
||||
# "benchmark" directories.
|
||||
- path: (internal\/|benchmark\/)
|
||||
text: exported (.+) should have comment( \(or a comment on this block\))? or be unexported
|
||||
# Ignore missing package comments for directories that aren't frequently used by external users.
|
||||
- path: (internal\/|benchmark\/|x\/|cmd\/|mongo\/integration\/)
|
||||
text: should have a package comment
|
||||
# Disable unused linter for "golang.org/x/exp/rand" package in internal/randutil/rand.
|
||||
- path: internal/randutil/rand
|
||||
linters:
|
||||
- unused
|
mongo/LICENSE (new file, 201 lines)
@@ -0,0 +1,201 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
mongo/Makefile (new file, 210 lines)
@@ -0,0 +1,210 @@
|
||||
ATLAS_URIS = "$(ATLAS_FREE)" "$(ATLAS_REPLSET)" "$(ATLAS_SHARD)" "$(ATLAS_TLS11)" "$(ATLAS_TLS12)" "$(ATLAS_FREE_SRV)" "$(ATLAS_REPLSET_SRV)" "$(ATLAS_SHARD_SRV)" "$(ATLAS_TLS11_SRV)" "$(ATLAS_TLS12_SRV)" "$(ATLAS_SERVERLESS)" "$(ATLAS_SERVERLESS_SRV)"
|
||||
TEST_TIMEOUT = 1800
|
||||
|
||||
### Utility targets. ###
|
||||
.PHONY: default
|
||||
default: build check-license check-fmt check-modules lint test-short
|
||||
|
||||
.PHONY: add-license
|
||||
add-license:
|
||||
etc/check_license.sh -a
|
||||
|
||||
.PHONY: check-license
|
||||
check-license:
|
||||
etc/check_license.sh
|
||||
|
||||
.PHONY: build
|
||||
build: cross-compile build-tests build-compile-check
|
||||
go build ./...
|
||||
go build $(BUILD_TAGS) ./...
|
||||
|
||||
# Use ^$ to match no tests so that no tests are actually run but all tests are
|
||||
# compiled. Run with -short to ensure none of the TestMain functions try to
|
||||
# connect to a server.
|
||||
.PHONY: build-tests
|
||||
build-tests:
|
||||
go test -short $(BUILD_TAGS) -run ^$$ ./...
|
||||
|
||||
.PHONY: build-compile-check
|
||||
build-compile-check:
|
||||
etc/compile_check.sh
|
||||
|
||||
# Cross-compiling on Linux for architectures 386, arm, arm64, amd64, ppc64le, and s390x.
|
||||
# Omit any build tags because we don't expect our build environment to support compiling the C
|
||||
# libraries for other architectures.
|
||||
.PHONY: cross-compile
|
||||
cross-compile:
|
||||
GOOS=linux GOARCH=386 go build ./...
|
||||
GOOS=linux GOARCH=arm go build ./...
|
||||
GOOS=linux GOARCH=arm64 go build ./...
|
||||
GOOS=linux GOARCH=amd64 go build ./...
|
||||
GOOS=linux GOARCH=ppc64le go build ./...
|
||||
GOOS=linux GOARCH=s390x go build ./...
|
||||
|
||||
.PHONY: install-lll
|
||||
install-lll:
|
||||
go install github.com/walle/lll/...@latest
|
||||
|
||||
.PHONY: check-fmt
|
||||
check-fmt: install-lll
|
||||
etc/check_fmt.sh
|
||||
|
||||
# check-modules runs "go mod tidy" then "go mod vendor" and exits with a non-zero exit code if there
|
||||
# are any module or vendored modules changes. The intent is to confirm two properties:
|
||||
#
|
||||
# 1. Exactly the required modules are declared as dependencies. We should always be able to run
|
||||
# "go mod tidy" and expect that no unrelated changes are made to the "go.mod" file.
|
||||
#
|
||||
# 2. All required modules are copied into the vendor/ directory and are an exact copy of the
|
||||
# original module source code (i.e. the vendored modules are not modified from their original code).
|
||||
.PHONY: check-modules
|
||||
check-modules:
|
||||
go mod tidy -v
|
||||
go mod vendor
|
||||
git diff --exit-code go.mod go.sum ./vendor
|
||||
|
||||
.PHONY: doc
|
||||
doc:
|
||||
godoc -http=:6060 -index
|
||||
|
||||
.PHONY: fmt
|
||||
fmt:
|
||||
go fmt ./...
|
||||
|
||||
.PHONY: install-golangci-lint
|
||||
install-golangci-lint:
|
||||
go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.51.0
|
||||
|
||||
# Lint with various GOOS and GOARCH targets to catch static analysis failures that may only affect
|
||||
# specific operating systems or architectures. For example, staticcheck will only check for 64-bit
|
||||
# alignment of atomically accessed variables on 32-bit architectures (see
|
||||
# https://staticcheck.io/docs/checks#SA1027)
|
||||
.PHONY: lint
|
||||
lint: install-golangci-lint
|
||||
GOOS=linux GOARCH=386 golangci-lint run --config .golangci.yml ./...
|
||||
GOOS=linux GOARCH=arm golangci-lint run --config .golangci.yml ./...
|
||||
GOOS=linux GOARCH=arm64 golangci-lint run --config .golangci.yml ./...
|
||||
GOOS=linux GOARCH=amd64 golangci-lint run --config .golangci.yml ./...
|
||||
GOOS=linux GOARCH=ppc64le golangci-lint run --config .golangci.yml ./...
|
||||
GOOS=linux GOARCH=s390x golangci-lint run --config .golangci.yml ./...
|
||||
|
||||
.PHONY: update-notices
|
||||
update-notices:
|
||||
etc/generate_notices.pl > THIRD-PARTY-NOTICES
|
||||
|
||||
### Local testing targets. ###
|
||||
.PHONY: test
|
||||
test:
|
||||
go test $(BUILD_TAGS) -timeout $(TEST_TIMEOUT)s -p 1 ./...
|
||||
|
||||
.PHONY: test-cover
|
||||
test-cover:
|
||||
go test $(BUILD_TAGS) -timeout $(TEST_TIMEOUT)s -cover $(COVER_ARGS) -p 1 ./...
|
||||
|
||||
.PHONY: test-race
|
||||
test-race:
|
||||
go test $(BUILD_TAGS) -timeout $(TEST_TIMEOUT)s -race -p 1 ./...
|
||||
|
||||
.PHONY: test-short
|
||||
test-short:
|
||||
go test $(BUILD_TAGS) -timeout 60s -short ./...
|
||||
|
||||
### Evergreen specific targets. ###
|
||||
.PHONY: build-aws-ecs-test
|
||||
build-aws-ecs-test:
|
||||
go build $(BUILD_TAGS) ./cmd/testaws/main.go
|
||||
|
||||
.PHONY: evg-test
|
||||
evg-test:
|
||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s -p 1 ./... >> test.suite
|
||||
|
||||
.PHONY: evg-test-atlas
|
||||
evg-test-atlas:
|
||||
go run ./cmd/testatlas/main.go $(ATLAS_URIS)
|
||||
|
||||
.PHONY: evg-test-atlas-data-lake
|
||||
evg-test-atlas-data-lake:
|
||||
ATLAS_DATA_LAKE_INTEGRATION_TEST=true go test -v ./mongo/integration -run TestUnifiedSpecs/atlas-data-lake-testing >> spec_test.suite
|
||||
ATLAS_DATA_LAKE_INTEGRATION_TEST=true go test -v ./mongo/integration -run TestAtlasDataLake >> spec_test.suite
|
||||
|
||||
.PHONY: evg-test-enterprise-auth
|
||||
evg-test-enterprise-auth:
|
||||
go run -tags gssapi ./cmd/testentauth/main.go
|
||||
|
||||
.PHONY: evg-test-kmip
|
||||
evg-test-kmip:
|
||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionSpec/kmipKMS >> test.suite
|
||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionProse/data_key_and_double_encryption >> test.suite
|
||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionProse/corpus >> test.suite
|
||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionProse/custom_endpoint >> test.suite
|
||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionProse/kms_tls_options_test >> test.suite
|
||||
|
||||
.PHONY: evg-test-kms
|
||||
evg-test-kms:
|
||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionProse/kms_tls_tests >> test.suite
|
||||
|
||||
.PHONY: evg-test-load-balancers
|
||||
evg-test-load-balancers:
|
||||
# Load balancer should be tested with all unified tests as well as tests in the following
|
||||
# components: retryable reads, retryable writes, change streams, initial DNS seedlist discovery.
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestUnifiedSpecs/retryable-reads -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestRetryableWritesSpec -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestChangeStreamSpec -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestInitialDNSSeedlistDiscoverySpec/load_balanced -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestLoadBalancerSupport -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration/unified -run TestUnifiedSpec -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
|
||||
.PHONY: evg-test-ocsp
|
||||
evg-test-ocsp:
|
||||
go test -v ./mongo -run TestOCSP $(OCSP_TLS_SHOULD_SUCCEED) >> test.suite
|
||||
|
||||
.PHONY: evg-test-serverless
|
||||
evg-test-serverless:
|
||||
# Serverless should be tested with all unified tests as well as tests in the following components: CRUD, load balancer,
|
||||
# retryable reads, retryable writes, sessions, transactions and cursor behavior.
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestCrudSpec -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestWriteErrorsWithLabels -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestWriteErrorsDetails -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestHintErrors -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestWriteConcernError -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestErrorsCodeNamePropagated -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestLoadBalancerSupport -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestUnifiedSpecs/retryable-reads -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestRetryableReadsProse -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestRetryableWritesSpec -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestRetryableWritesProse -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestUnifiedSpecs/sessions -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestSessionsProse -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestUnifiedSpecs/transactions/legacy -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestConvenientTransactions -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration -run TestCursor -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test $(BUILD_TAGS) ./mongo/integration/unified -run TestUnifiedSpec -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionSpec >> test.suite
|
||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionProse >> test.suite
|
||||
|
||||
.PHONY: evg-test-versioned-api
|
||||
evg-test-versioned-api:
|
||||
# Versioned API related tests are in the mongo, integration and unified packages.
|
||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo >> test.suite
|
||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration >> test.suite
|
||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration/unified >> test.suite
|
||||
|
||||
.PHONY: build-gcpkms-test
|
||||
build-gcpkms-test:
|
||||
go build $(BUILD_TAGS) ./cmd/testgcpkms
|
||||
|
||||
### Benchmark specific targets and support. ###
|
||||
.PHONY: benchmark
|
||||
benchmark: perf
|
||||
go test $(BUILD_TAGS) -benchmem -bench=. ./benchmark
|
||||
|
||||
.PHONY: driver-benchmark
|
||||
driver-benchmark: perf
|
||||
@go run cmd/godriver-benchmark/main.go | tee perf.suite
|
||||
|
||||
perf: driver-test-data.tar.gz
|
||||
tar -zxf $< $(if $(eq $(UNAME_S),Darwin),-s , --transform=s)/testdata/perf/
|
||||
@touch $@
|
||||
|
||||
driver-test-data.tar.gz:
|
||||
curl --retry 5 "https://s3.amazonaws.com/boxes.10gen.com/build/driver-test-data.tar.gz" -o driver-test-data.tar.gz --silent --max-time 120
|
mongo/README.md (new file, 251 lines)
@@ -0,0 +1,251 @@
|
||||
<p align="center"><img src="etc/assets/mongo-gopher.png" width="250"></p>
|
||||
<p align="center">
|
||||
<a href="https://goreportcard.com/report/go.mongodb.org/mongo-driver"><img src="https://goreportcard.com/badge/go.mongodb.org/mongo-driver"></a>
|
||||
<a href="https://pkg.go.dev/go.mongodb.org/mongo-driver/mongo"><img src="etc/assets/godev-mongo-blue.svg" alt="docs"></a>
|
||||
<a href="https://pkg.go.dev/go.mongodb.org/mongo-driver/bson"><img src="etc/assets/godev-bson-blue.svg" alt="docs"></a>
|
||||
<a href="https://www.mongodb.com/docs/drivers/go/current/"><img src="etc/assets/docs-mongodb-green.svg"></a>
|
||||
</p>
|
||||
|
||||
# MongoDB Go Driver
|
||||
|
||||
The MongoDB supported driver for Go.
|
||||
|
||||
-------------------------
|
||||
- [Requirements](#requirements)
|
||||
- [Installation](#installation)
|
||||
- [Usage](#usage)
|
||||
- [Feedback](#feedback)
|
||||
- [Testing / Development](#testing--development)
|
||||
- [Continuous Integration](#continuous-integration)
|
||||
- [License](#license)
|
||||
|
||||
-------------------------
|
||||
## Requirements
|
||||
|
||||
- Go 1.13 or higher. We aim to support the latest versions of Go.
|
||||
- Go 1.20 or higher is required to run the driver test suite.
|
||||
- MongoDB 3.6 and higher.
|
||||
|
||||
-------------------------
|
||||
## Installation
|
||||
|
||||
The recommended way to get started using the MongoDB Go driver is by using Go modules to install the dependency in
|
||||
your project. This can be done either by importing packages from `go.mongodb.org/mongo-driver` and having the build
|
||||
step install the dependency or by explicitly running
|
||||
|
||||
```bash
|
||||
go get go.mongodb.org/mongo-driver/mongo
|
||||
```
|
||||
|
||||
When using a version of Go that does not support modules, the driver can be installed using `dep` by running
|
||||
|
||||
```bash
|
||||
dep ensure -add "go.mongodb.org/mongo-driver/mongo"
|
||||
```
|
||||
|
||||
-------------------------
|
||||
## Usage
|
||||
|
||||
To get started with the driver, import the `mongo` package and create a `mongo.Client` with the `Connect` function:
|
||||
|
||||
```go
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"go.mongodb.org/mongo-driver/mongo"
|
||||
"go.mongodb.org/mongo-driver/mongo/options"
|
||||
"go.mongodb.org/mongo-driver/mongo/readpref"
|
||||
)
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
client, err := mongo.Connect(ctx, options.Client().ApplyURI("mongodb://localhost:27017"))
|
||||
```
|
||||
|
||||
Make sure to defer a call to `Disconnect` after instantiating your client:
|
||||
|
||||
```go
|
||||
defer func() {
|
||||
if err = client.Disconnect(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}()
|
||||
```
|
||||
|
||||
For more advanced configuration and authentication, see the [documentation for mongo.Connect](https://pkg.go.dev/go.mongodb.org/mongo-driver/mongo#Connect).
|
||||
|
||||
Calling `Connect` does not block for server discovery. If you wish to know if a MongoDB server has been found and connected to,
|
||||
use the `Ping` method:
|
||||
|
||||
```go
|
||||
ctx, cancel = context.WithTimeout(context.Background(), 2*time.Second)
|
||||
defer cancel()
|
||||
err = client.Ping(ctx, readpref.Primary())
|
||||
```
|
||||
|
||||
To insert a document into a collection, first retrieve a `Database` and then `Collection` instance from the `Client`:
|
||||
|
||||
```go
|
||||
collection := client.Database("testing").Collection("numbers")
|
||||
```
|
||||
|
||||
The `Collection` instance can then be used to insert documents:
|
||||
|
||||
```go
|
||||
ctx, cancel = context.WithTimeout(context.Background(), 5*time.Second)
|
||||
defer cancel()
|
||||
res, err := collection.InsertOne(ctx, bson.D{{"name", "pi"}, {"value", 3.14159}})
|
||||
id := res.InsertedID
|
||||
```
|
||||
|
||||
To use `bson.D`, you will need to add `"go.mongodb.org/mongo-driver/bson"` to your imports.
|
||||
|
||||
Your import statement should now look like this:
|
||||
|
||||
```go
|
||||
import (
|
||||
"context"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"go.mongodb.org/mongo-driver/bson"
|
||||
"go.mongodb.org/mongo-driver/mongo"
|
||||
"go.mongodb.org/mongo-driver/mongo/options"
|
||||
"go.mongodb.org/mongo-driver/mongo/readpref"
|
||||
)
|
||||
```
|
||||
|
||||
Several query methods return a cursor, which can be used like this:
|
||||
|
||||
```go
|
||||
ctx, cancel = context.WithTimeout(context.Background(), 30*time.Second)
|
||||
defer cancel()
|
||||
cur, err := collection.Find(ctx, bson.D{})
|
||||
if err != nil { log.Fatal(err) }
|
||||
defer cur.Close(ctx)
|
||||
for cur.Next(ctx) {
|
||||
var result bson.D
|
||||
err := cur.Decode(&result)
|
||||
if err != nil { log.Fatal(err) }
|
||||
// do something with result....
|
||||
}
|
||||
if err := cur.Err(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
```
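
If you do not need to iterate manually, the cursor's `All` method decodes every matched document into a slice in a single call and closes the cursor when it is done. A brief sketch (not part of the original example; it reuses the `collection` and `ctx` variables from above):

```go
cur, err := collection.Find(ctx, bson.D{})
if err != nil {
    log.Fatal(err)
}
// All decodes all remaining documents into results and closes the cursor.
var results []bson.D
if err = cur.All(ctx, &results); err != nil {
    log.Fatal(err)
}
```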
|
||||
|
||||
For methods that return a single item, a `SingleResult` instance is returned:
|
||||
|
||||
```go
|
||||
var result struct {
|
||||
Value float64
|
||||
}
|
||||
filter := bson.D{{"name", "pi"}}
|
||||
ctx, cancel = context.WithTimeout(context.Background(), 5*time.Second)
|
||||
defer cancel()
|
||||
err = collection.FindOne(ctx, filter).Decode(&result)
|
||||
if err == mongo.ErrNoDocuments {
|
||||
// Do something when no record was found
|
||||
fmt.Println("record does not exist")
|
||||
} else if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
// Do something with result...
|
||||
```
|
||||
|
||||
Additional examples and documentation can be found under the examples directory and [on the MongoDB Documentation website](https://www.mongodb.com/docs/drivers/go/current/).
|
||||
|
||||
-------------------------
|
||||
## Feedback
|
||||
|
||||
For help with the driver, please post in the [MongoDB Community Forums](https://developer.mongodb.com/community/forums/tag/golang/).
|
||||
|
||||
New feature requests and bugs can be reported on JIRA: https://jira.mongodb.org/browse/GODRIVER
|
||||
|
||||
-------------------------
|
||||
## Testing / Development
|
||||
|
||||
The driver tests can be run against several database configurations. The simplest configuration is a standalone mongod with no auth, no SSL, and no compression. To run these basic driver tests, make sure a standalone MongoDB server instance is running at localhost:27017. To run the tests, run `make` (on Windows, run `nmake`). This will run coverage, go-lint, go-vet, and build the examples.
|
||||
|
||||
### Testing Different Topologies
|
||||
|
||||
To test a **replica set** or **sharded cluster**, set `MONGODB_URI="<connection-string>"` for the `make` command.
|
||||
For example, for a local replica set named `rs1` comprised of three nodes on ports 27017, 27018, and 27019:
|
||||
|
||||
```
|
||||
MONGODB_URI="mongodb://localhost:27017,localhost:27018,localhost:27019/?replicaSet=rs1" make
|
||||
```
|
||||
|
||||
### Testing Auth and TLS
|
||||
|
||||
To test authentication and TLS, first set up a MongoDB cluster with auth and TLS configured. Testing authentication requires a user with the `root` role on the `admin` database. Here is an example command that would run a mongod with TLS correctly configured for tests. Either set or replace PATH_TO_SERVER_KEY_FILE and PATH_TO_CA_FILE with paths to their respective files:
|
||||
|
||||
```
|
||||
mongod \
|
||||
--auth \
|
||||
--tlsMode requireTLS \
|
||||
--tlsCertificateKeyFile $PATH_TO_SERVER_KEY_FILE \
|
||||
--tlsCAFile $PATH_TO_CA_FILE \
|
||||
--tlsAllowInvalidCertificates
|
||||
```
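
The `root` user mentioned above can be created on the `admin` database before `--auth` and TLS are enabled, for example with `mongosh` (the user name and password below are placeholders that match the example connection string later in this section):

```
mongosh "mongodb://localhost:27017/admin" --eval \
  'db.createUser({user: "user", pwd: "password", roles: ["root"]})'
```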
|
||||
|
||||
To run the tests with `make`, set:
|
||||
- `MONGO_GO_DRIVER_CA_FILE` to the location of the CA file used by the database
|
||||
- `MONGO_GO_DRIVER_KEY_FILE` to the location of the client key file
|
||||
- `MONGO_GO_DRIVER_PKCS8_ENCRYPTED_KEY_FILE` to the location of the pkcs8 client key file encrypted with the password string: `password`
|
||||
- `MONGO_GO_DRIVER_PKCS8_UNENCRYPTED_KEY_FILE` to the location of the unencrypted pkcs8 key file
|
||||
- `MONGODB_URI` to the connection string of the server
|
||||
- `AUTH=auth`
|
||||
- `SSL=ssl`
|
||||
|
||||
For example:
|
||||
|
||||
```
|
||||
AUTH=auth SSL=ssl \
|
||||
MONGO_GO_DRIVER_CA_FILE=$PATH_TO_CA_FILE \
|
||||
MONGO_GO_DRIVER_KEY_FILE=$PATH_TO_CLIENT_KEY_FILE \
|
||||
MONGO_GO_DRIVER_PKCS8_ENCRYPTED_KEY_FILE=$PATH_TO_ENCRYPTED_KEY_FILE \
|
||||
MONGO_GO_DRIVER_PKCS8_UNENCRYPTED_KEY_FILE=$PATH_TO_UNENCRYPTED_KEY_FILE \
|
||||
MONGODB_URI="mongodb://user:password@localhost:27017/?authSource=admin" \
|
||||
make
|
||||
```
|
||||
|
||||
Notes:
|
||||
- The `--tlsAllowInvalidCertificates` flag is required on the server for the test suite to work correctly.
|
||||
- The test suite requires the auth database to be set with `?authSource=admin`, not `/admin`.
|
||||
|
||||
### Testing Compression
|
||||
|
||||
The MongoDB Go Driver supports wire protocol compression using Snappy, zLib, or zstd. To run tests with wire protocol compression, set `MONGO_GO_DRIVER_COMPRESSOR` to `snappy`, `zlib`, or `zstd`. For example:
|
||||
|
||||
```
|
||||
MONGO_GO_DRIVER_COMPRESSOR=snappy make
|
||||
```
|
||||
|
||||
Ensure the [`--networkMessageCompressors` flag](https://www.mongodb.com/docs/manual/reference/program/mongod/#cmdoption-mongod-networkmessagecompressors) on mongod or mongos includes `zlib` if testing zLib compression.
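
For example, a locally started mongod used for compression testing could enable all three compressors (an illustrative invocation, not taken from the driver's tooling):

```
mongod --networkMessageCompressors "snappy,zlib,zstd"
```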
|
||||
|
||||
-------------------------
|
||||
## Contribution
|
||||
|
||||
Check out the [project page](https://jira.mongodb.org/browse/GODRIVER) for tickets that need completing. See our [contribution guidelines](docs/CONTRIBUTING.md) for details.
|
||||
|
||||
-------------------------
|
||||
## Continuous Integration
|
||||
|
||||
Commits to master are run automatically on [evergreen](https://evergreen.mongodb.com/waterfall/mongo-go-driver).
|
||||
|
||||
-------------------------
|
||||
## Frequently Encountered Issues
|
||||
|
||||
See our [common issues](docs/common-issues.md) documentation for troubleshooting frequently encountered issues.
|
||||
|
||||
-------------------------
|
||||
## Thanks and Acknowledgement
|
||||
|
||||
<a href="https://github.com/ashleymcnamara">@ashleymcnamara</a> - Mongo Gopher Artwork
|
||||
|
||||
-------------------------
|
||||
## License
|
||||
|
||||
The MongoDB Go Driver is licensed under the [Apache License](LICENSE).
|
mongo/THIRD-PARTY-NOTICES (new file, 1554 lines)
File diff suppressed because it is too large.
mongo/bson/benchmark_test.go (new file, 307 lines)
@@ -0,0 +1,307 @@
|
||||
// Copyright (C) MongoDB, Inc. 2017-present.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
package bson
|
||||
|
||||
import (
|
||||
"compress/gzip"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path"
|
||||
"testing"
|
||||
)
|
||||
|
||||
type encodetest struct {
|
||||
Field1String string
|
||||
Field1Int64 int64
|
||||
Field1Float64 float64
|
||||
Field2String string
|
||||
Field2Int64 int64
|
||||
Field2Float64 float64
|
||||
Field3String string
|
||||
Field3Int64 int64
|
||||
Field3Float64 float64
|
||||
Field4String string
|
||||
Field4Int64 int64
|
||||
Field4Float64 float64
|
||||
}
|
||||
|
||||
type nestedtest1 struct {
|
||||
Nested nestedtest2
|
||||
}
|
||||
|
||||
type nestedtest2 struct {
|
||||
Nested nestedtest3
|
||||
}
|
||||
|
||||
type nestedtest3 struct {
|
||||
Nested nestedtest4
|
||||
}
|
||||
|
||||
type nestedtest4 struct {
|
||||
Nested nestedtest5
|
||||
}
|
||||
|
||||
type nestedtest5 struct {
|
||||
Nested nestedtest6
|
||||
}
|
||||
|
||||
type nestedtest6 struct {
|
||||
Nested nestedtest7
|
||||
}
|
||||
|
||||
type nestedtest7 struct {
|
||||
Nested nestedtest8
|
||||
}
|
||||
|
||||
type nestedtest8 struct {
|
||||
Nested nestedtest9
|
||||
}
|
||||
|
||||
type nestedtest9 struct {
|
||||
Nested nestedtest10
|
||||
}
|
||||
|
||||
type nestedtest10 struct {
|
||||
Nested nestedtest11
|
||||
}
|
||||
|
||||
type nestedtest11 struct {
|
||||
Nested encodetest
|
||||
}
|
||||
|
||||
var encodetestInstance = encodetest{
|
||||
Field1String: "foo",
|
||||
Field1Int64: 1,
|
||||
Field1Float64: 3.0,
|
||||
Field2String: "bar",
|
||||
Field2Int64: 2,
|
||||
Field2Float64: 3.1,
|
||||
Field3String: "baz",
|
||||
Field3Int64: 3,
|
||||
Field3Float64: 3.14,
|
||||
Field4String: "qux",
|
||||
Field4Int64: 4,
|
||||
Field4Float64: 3.141,
|
||||
}
|
||||
|
||||
var nestedInstance = nestedtest1{
|
||||
nestedtest2{
|
||||
nestedtest3{
|
||||
nestedtest4{
|
||||
nestedtest5{
|
||||
nestedtest6{
|
||||
nestedtest7{
|
||||
nestedtest8{
|
||||
nestedtest9{
|
||||
nestedtest10{
|
||||
nestedtest11{
|
||||
encodetest{
|
||||
Field1String: "foo",
|
||||
Field1Int64: 1,
|
||||
Field1Float64: 3.0,
|
||||
Field2String: "bar",
|
||||
Field2Int64: 2,
|
||||
Field2Float64: 3.1,
|
||||
Field3String: "baz",
|
||||
Field3Int64: 3,
|
||||
Field3Float64: 3.14,
|
||||
Field4String: "qux",
|
||||
Field4Int64: 4,
|
||||
Field4Float64: 3.141,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const extendedBSONDir = "../testdata/extended_bson"
|
||||
|
||||
// readExtJSONFile reads the GZIP-compressed extended JSON document from the given filename in the
|
||||
// "extended BSON" test data directory (../testdata/extended_bson) and returns it as a
|
||||
// map[string]interface{}. It panics on any errors.
|
||||
func readExtJSONFile(filename string) map[string]interface{} {
|
||||
filePath := path.Join(extendedBSONDir, filename)
|
||||
file, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("error opening file %q: %s", filePath, err))
|
||||
}
|
||||
defer func() {
|
||||
_ = file.Close()
|
||||
}()
|
||||
|
||||
gz, err := gzip.NewReader(file)
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("error creating GZIP reader: %s", err))
|
||||
}
|
||||
defer func() {
|
||||
_ = gz.Close()
|
||||
}()
|
||||
|
||||
data, err := ioutil.ReadAll(gz)
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("error reading GZIP contents of file: %s", err))
|
||||
}
|
||||
|
||||
var v map[string]interface{}
|
||||
err = UnmarshalExtJSON(data, false, &v)
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("error unmarshalling extended JSON: %s", err))
|
||||
}
|
||||
|
||||
return v
|
||||
}
|
||||
|
||||
func BenchmarkMarshal(b *testing.B) {
|
||||
cases := []struct {
|
||||
desc string
|
||||
value interface{}
|
||||
}{
|
||||
{
|
||||
desc: "simple struct",
|
||||
value: encodetestInstance,
|
||||
},
|
||||
{
|
||||
desc: "nested struct",
|
||||
value: nestedInstance,
|
||||
},
|
||||
{
|
||||
desc: "deep_bson.json.gz",
|
||||
value: readExtJSONFile("deep_bson.json.gz"),
|
||||
},
|
||||
{
|
||||
desc: "flat_bson.json.gz",
|
||||
value: readExtJSONFile("flat_bson.json.gz"),
|
||||
},
|
||||
{
|
||||
desc: "full_bson.json.gz",
|
||||
value: readExtJSONFile("full_bson.json.gz"),
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
b.Run(tc.desc, func(b *testing.B) {
|
||||
b.Run("BSON", func(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, err := Marshal(tc.value)
|
||||
if err != nil {
|
||||
b.Errorf("error marshalling BSON: %s", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
b.Run("extJSON", func(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, err := MarshalExtJSON(tc.value, true, false)
|
||||
if err != nil {
|
||||
b.Errorf("error marshalling extended JSON: %s", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
b.Run("JSON", func(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, err := json.Marshal(tc.value)
|
||||
if err != nil {
|
||||
b.Errorf("error marshalling JSON: %s", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkUnmarshal(b *testing.B) {
|
||||
cases := []struct {
|
||||
desc string
|
||||
value interface{}
|
||||
}{
|
||||
{
|
||||
desc: "simple struct",
|
||||
value: encodetestInstance,
|
||||
},
|
||||
{
|
||||
desc: "nested struct",
|
||||
value: nestedInstance,
|
||||
},
|
||||
{
|
||||
desc: "deep_bson.json.gz",
|
||||
value: readExtJSONFile("deep_bson.json.gz"),
|
||||
},
|
||||
{
|
||||
desc: "flat_bson.json.gz",
|
||||
value: readExtJSONFile("flat_bson.json.gz"),
|
||||
},
|
||||
{
|
||||
desc: "full_bson.json.gz",
|
||||
value: readExtJSONFile("full_bson.json.gz"),
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
b.Run(tc.desc, func(b *testing.B) {
|
||||
b.Run("BSON", func(b *testing.B) {
|
||||
data, err := Marshal(tc.value)
|
||||
if err != nil {
|
||||
b.Errorf("error marshalling BSON: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
b.ResetTimer()
|
||||
var v2 map[string]interface{}
|
||||
for i := 0; i < b.N; i++ {
|
||||
err := Unmarshal(data, &v2)
|
||||
if err != nil {
|
||||
b.Errorf("error unmarshalling BSON: %s", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
b.Run("extJSON", func(b *testing.B) {
|
||||
data, err := MarshalExtJSON(tc.value, true, false)
|
||||
if err != nil {
|
||||
b.Errorf("error marshalling extended JSON: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
b.ResetTimer()
|
||||
var v2 map[string]interface{}
|
||||
for i := 0; i < b.N; i++ {
|
||||
err := UnmarshalExtJSON(data, true, &v2)
|
||||
if err != nil {
|
||||
b.Errorf("error unmarshalling extended JSON: %s", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
b.Run("JSON", func(b *testing.B) {
|
||||
data, err := json.Marshal(tc.value)
|
||||
if err != nil {
|
||||
b.Errorf("error marshalling JSON: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
b.ResetTimer()
|
||||
var v2 map[string]interface{}
|
||||
for i := 0; i < b.N; i++ {
|
||||
err := json.Unmarshal(data, &v2)
|
||||
if err != nil {
|
||||
b.Errorf("error unmarshalling JSON: %s", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
mongo/bson/bson.go (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
// Copyright (C) MongoDB, Inc. 2017-present.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Based on gopkg.in/mgo.v2/bson by Gustavo Niemeyer
|
||||
// See THIRD-PARTY-NOTICES for original license terms.
|
||||
|
||||
package bson // import "go.mongodb.org/mongo-driver/bson"
|
||||
|
||||
import (
|
||||
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||
)
|
||||
|
||||
// Zeroer allows custom struct types to implement a report of zero
|
||||
// state. All struct types that don't implement Zeroer or where IsZero
|
||||
// returns false are considered to be not zero.
|
||||
type Zeroer interface {
|
||||
IsZero() bool
|
||||
}
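// An illustrative example (not part of this package): a struct whose zero
// state is an empty ID, so encoders that check for zero values (for example
// via the omitempty struct tag) can treat such a value as empty.
//
//	type Account struct {
//	    ID string
//	}
//
//	func (a Account) IsZero() bool { return a.ID == "" }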
|
||||
|
||||
// D is an ordered representation of a BSON document. This type should be used when the order of the elements matters,
|
||||
// such as MongoDB command documents. If the order of the elements does not matter, an M should be used instead.
|
||||
//
|
||||
// A D should not be constructed with duplicate key names, as that can cause undefined server behavior.
|
||||
//
|
||||
// Example usage:
|
||||
//
|
||||
// bson.D{{"foo", "bar"}, {"hello", "world"}, {"pi", 3.14159}}
|
||||
type D = primitive.D
|
||||
|
||||
// E represents a BSON element for a D. It is usually used inside a D.
|
||||
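// Example usage (illustrative), equivalent to the unkeyed form shown in the D example above:
//
//	bson.D{bson.E{Key: "pi", Value: 3.14159}}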
type E = primitive.E
|
||||
|
||||
// M is an unordered representation of a BSON document. This type should be used when the order of the elements does not
|
||||
// matter. This type is handled as a regular map[string]interface{} when encoding and decoding. Elements will be
|
||||
// serialized in an undefined, random order. If the order of the elements matters, a D should be used instead.
|
||||
//
|
||||
// Example usage:
|
||||
//
|
||||
// bson.M{"foo": "bar", "hello": "world", "pi": 3.14159}
|
||||
type M = primitive.M
|
||||
|
||||
// An A is an ordered representation of a BSON array.
|
||||
//
|
||||
// Example usage:
|
||||
//
|
||||
// bson.A{"bar", "world", 3.14159, bson.D{{"qux", 12345}}}
|
||||
type A = primitive.A
|
mongo/bson/bson_corpus_spec_test.go (new file, 530 lines)
@@ -0,0 +1,530 @@
|
||||
// Copyright (C) MongoDB, Inc. 2017-present.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
package bson
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"os"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/tidwall/pretty"
|
||||
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||
"go.mongodb.org/mongo-driver/internal/testutil/assert"
|
||||
)
|
||||
|
||||
type testCase struct {
|
||||
Description string `json:"description"`
|
||||
BsonType string `json:"bson_type"`
|
||||
TestKey *string `json:"test_key"`
|
||||
Valid []validityTestCase `json:"valid"`
|
||||
DecodeErrors []decodeErrorTestCase `json:"decodeErrors"`
|
||||
ParseErrors []parseErrorTestCase `json:"parseErrors"`
|
||||
Deprecated *bool `json:"deprecated"`
|
||||
}
|
||||
|
||||
type validityTestCase struct {
|
||||
Description string `json:"description"`
|
||||
CanonicalBson string `json:"canonical_bson"`
|
||||
CanonicalExtJSON string `json:"canonical_extjson"`
|
||||
RelaxedExtJSON *string `json:"relaxed_extjson"`
|
||||
DegenerateBSON *string `json:"degenerate_bson"`
|
||||
DegenerateExtJSON *string `json:"degenerate_extjson"`
|
||||
ConvertedBSON *string `json:"converted_bson"`
|
||||
ConvertedExtJSON *string `json:"converted_extjson"`
|
||||
Lossy *bool `json:"lossy"`
|
||||
}
|
||||
|
||||
type decodeErrorTestCase struct {
|
||||
Description string `json:"description"`
|
||||
Bson string `json:"bson"`
|
||||
}
|
||||
|
||||
type parseErrorTestCase struct {
|
||||
Description string `json:"description"`
|
||||
String string `json:"string"`
|
||||
}
|
||||
|
||||
const dataDir = "../testdata/bson-corpus/"
|
||||
|
||||
func findJSONFilesInDir(dir string) ([]string, error) {
|
||||
files := make([]string, 0)
|
||||
|
||||
entries, err := os.ReadDir(dir)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, entry := range entries {
|
||||
if entry.IsDir() || path.Ext(entry.Name()) != ".json" {
|
||||
continue
|
||||
}
|
||||
|
||||
files = append(files, entry.Name())
|
||||
}
|
||||
|
||||
return files, nil
|
||||
}
|
||||
|
||||
// seedExtJSON will add the byte representation of the "extJSON" string to the fuzzer's corpus.
|
||||
func seedExtJSON(f *testing.F, extJSON string, extJSONType string, desc string) {
|
||||
jbytes, err := jsonToBytes(extJSON, extJSONType, desc)
|
||||
if err != nil {
|
||||
f.Fatalf("failed to convert JSON to bytes: %v", err)
|
||||
}
|
||||
|
||||
f.Add(jbytes)
|
||||
}
|
||||
|
||||
// seedTestCase will add the byte representation for each "extJSON" string of each valid test case to the fuzzer's
|
||||
// corpus.
|
||||
func seedTestCase(f *testing.F, tcase *testCase) {
|
||||
for _, vtc := range tcase.Valid {
|
||||
seedExtJSON(f, vtc.CanonicalExtJSON, "canonical", vtc.Description)
|
||||
|
||||
// Seed the relaxed extended JSON.
|
||||
if vtc.RelaxedExtJSON != nil {
|
||||
seedExtJSON(f, *vtc.RelaxedExtJSON, "relaxed", vtc.Description)
|
||||
}
|
||||
|
||||
// Seed the degenerate extended JSON.
|
||||
if vtc.DegenerateExtJSON != nil {
|
||||
seedExtJSON(f, *vtc.DegenerateExtJSON, "degenerate", vtc.Description)
|
||||
}
|
||||
|
||||
// Seed the converted extended JSON.
|
||||
if vtc.ConvertedExtJSON != nil {
|
||||
seedExtJSON(f, *vtc.ConvertedExtJSON, "converted", vtc.Description)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// seedBSONCorpus will unmarshal the data from "testdata/bson-corpus" into a slice of "testCase" structs and then
|
||||
// marshal the "*_extjson" field of each "validityTestCase" into a slice of bytes to seed the fuzz corpus.
|
||||
func seedBSONCorpus(f *testing.F) {
|
||||
fileNames, err := findJSONFilesInDir(dataDir)
|
||||
if err != nil {
|
||||
f.Fatalf("failed to find JSON files in directory %q: %v", dataDir, err)
|
||||
}
|
||||
|
||||
for _, fileName := range fileNames {
|
||||
filePath := path.Join(dataDir, fileName)
|
||||
|
||||
file, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
f.Fatalf("failed to open file %q: %v", filePath, err)
|
||||
}
|
||||
|
||||
var tcase testCase
|
||||
if err := json.NewDecoder(file).Decode(&tcase); err != nil {
|
||||
f.Fatal(err)
|
||||
}
|
||||
|
||||
seedTestCase(f, &tcase)
|
||||
}
|
||||
}
|
||||
|
||||
func needsEscapedUnicode(bsonType string) bool {
|
||||
return bsonType == "0x02" || bsonType == "0x0D" || bsonType == "0x0E" || bsonType == "0x0F"
|
||||
}
|
||||
|
||||
func unescapeUnicode(s, bsonType string) string {
|
||||
if !needsEscapedUnicode(bsonType) {
|
||||
return s
|
||||
}
|
||||
|
||||
newS := ""
|
||||
|
||||
for i := 0; i < len(s); i++ {
|
||||
c := s[i]
|
||||
switch c {
|
||||
case '\\':
|
||||
switch s[i+1] {
|
||||
case 'u':
|
||||
us := s[i : i+6]
|
||||
u, err := strconv.Unquote(strings.Replace(strconv.Quote(us), `\\u`, `\u`, 1))
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
for _, r := range u {
|
||||
if r < ' ' {
|
||||
newS += fmt.Sprintf(`\u%04x`, r)
|
||||
} else {
|
||||
newS += string(r)
|
||||
}
|
||||
}
|
||||
i += 5
|
||||
default:
|
||||
newS += string(c)
|
||||
}
|
||||
default:
|
||||
if c > unicode.MaxASCII {
|
||||
r, size := utf8.DecodeRune([]byte(s[i:]))
|
||||
newS += string(r)
|
||||
i += size - 1
|
||||
} else {
|
||||
newS += string(c)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return newS
|
||||
}
|
||||
|
||||
func formatDouble(f float64) string {
|
||||
var s string
|
||||
if math.IsInf(f, 1) {
|
||||
s = "Infinity"
|
||||
} else if math.IsInf(f, -1) {
|
||||
s = "-Infinity"
|
||||
} else if math.IsNaN(f) {
|
||||
s = "NaN"
|
||||
} else {
|
||||
// Print exactly one decimal place for integers; otherwise, print as many digits as are necessary to
|
||||
// perfectly represent it.
|
||||
s = strconv.FormatFloat(f, 'G', -1, 64)
|
||||
if !strings.ContainsRune(s, 'E') && !strings.ContainsRune(s, '.') {
|
||||
s += ".0"
|
||||
}
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
func normalizeCanonicalDouble(t *testing.T, key string, cEJ string) string {
|
||||
// Unmarshal string into map
|
||||
cEJMap := make(map[string]map[string]string)
|
||||
err := json.Unmarshal([]byte(cEJ), &cEJMap)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Parse the float contained by the map.
|
||||
expectedString := cEJMap[key]["$numberDouble"]
|
||||
expectedFloat, err := strconv.ParseFloat(expectedString, 64)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Normalize the string
|
||||
return fmt.Sprintf(`{"%s":{"$numberDouble":"%s"}}`, key, formatDouble(expectedFloat))
|
||||
}
|
||||
|
||||
func normalizeRelaxedDouble(t *testing.T, key string, rEJ string) string {
|
||||
// Unmarshal string into map
|
||||
rEJMap := make(map[string]float64)
|
||||
err := json.Unmarshal([]byte(rEJ), &rEJMap)
|
||||
if err != nil {
|
||||
return normalizeCanonicalDouble(t, key, rEJ)
|
||||
}
|
||||
|
||||
// Parse the float contained by the map.
|
||||
expectedFloat := rEJMap[key]
|
||||
|
||||
// Normalize the string
|
||||
return fmt.Sprintf(`{"%s":%s}`, key, formatDouble(expectedFloat))
|
||||
}
|
||||
|
||||
// bsonToNative decodes the BSON bytes (b) into a native Document
|
||||
func bsonToNative(t *testing.T, b []byte, bType, testDesc string) D {
|
||||
var doc D
|
||||
err := Unmarshal(b, &doc)
|
||||
expectNoError(t, err, fmt.Sprintf("%s: decoding %s BSON", testDesc, bType))
|
||||
return doc
|
||||
}
|
||||
|
||||
// nativeToBSON encodes the native Document (doc) into canonical BSON and compares it to the expected
|
||||
// canonical BSON (cB)
|
||||
func nativeToBSON(t *testing.T, cB []byte, doc D, testDesc, bType, docSrcDesc string) {
|
||||
actual, err := Marshal(doc)
|
||||
expectNoError(t, err, fmt.Sprintf("%s: encoding %s BSON", testDesc, bType))
|
||||
|
||||
if diff := cmp.Diff(cB, actual); diff != "" {
|
||||
t.Errorf("%s: 'native_to_bson(%s) = cB' failed (-want, +got):\n-%v\n+%v\n",
|
||||
testDesc, docSrcDesc, cB, actual)
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
// jsonToNative decodes the extended JSON string (ej) into a native Document
|
||||
func jsonToNative(ej, ejType, testDesc string) (D, error) {
|
||||
var doc D
|
||||
if err := UnmarshalExtJSON([]byte(ej), ejType != "relaxed", &doc); err != nil {
|
||||
return nil, fmt.Errorf("%s: decoding %s extended JSON: %w", testDesc, ejType, err)
|
||||
}
|
||||
return doc, nil
|
||||
}
|
||||
|
||||
// jsonToBytes decodes the extended JSON string (ej) into canonical BSON and then encodes it into a byte slice.
|
||||
func jsonToBytes(ej, ejType, testDesc string) ([]byte, error) {
|
||||
native, err := jsonToNative(ej, ejType, testDesc)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
b, err := Marshal(native)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("%s: encoding %s BSON: %w", testDesc, ejType, err)
|
||||
}
|
||||
|
||||
return b, nil
|
||||
}
|
||||
|
||||
// nativeToJSON encodes the native Document (doc) into an extended JSON string
|
||||
func nativeToJSON(t *testing.T, ej string, doc D, testDesc, ejType, ejShortName, docSrcDesc string) {
|
||||
actualEJ, err := MarshalExtJSON(doc, ejType != "relaxed", true)
|
||||
expectNoError(t, err, fmt.Sprintf("%s: encoding %s extended JSON", testDesc, ejType))
|
||||
|
||||
if diff := cmp.Diff(ej, string(actualEJ)); diff != "" {
|
||||
t.Errorf("%s: 'native_to_%s_extended_json(%s) = %s' failed (-want, +got):\n%s\n",
|
||||
testDesc, ejType, docSrcDesc, ejShortName, diff)
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func runTest(t *testing.T, file string) {
|
||||
filepath := path.Join(dataDir, file)
|
||||
content, err := os.ReadFile(filepath)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Remove ".json" from filename.
|
||||
file = file[:len(file)-5]
|
||||
testName := "bson_corpus--" + file
|
||||
|
||||
t.Run(testName, func(t *testing.T) {
|
||||
var test testCase
|
||||
require.NoError(t, json.Unmarshal(content, &test))
|
||||
|
||||
t.Run("valid", func(t *testing.T) {
|
||||
for _, v := range test.Valid {
|
||||
t.Run(v.Description, func(t *testing.T) {
|
||||
// get canonical BSON
|
||||
cB, err := hex.DecodeString(v.CanonicalBson)
|
||||
expectNoError(t, err, fmt.Sprintf("%s: reading canonical BSON", v.Description))
|
||||
|
||||
// get canonical extended JSON
|
||||
cEJ := unescapeUnicode(string(pretty.Ugly([]byte(v.CanonicalExtJSON))), test.BsonType)
|
||||
if test.BsonType == "0x01" {
|
||||
cEJ = normalizeCanonicalDouble(t, *test.TestKey, cEJ)
|
||||
}
|
||||
|
||||
/*** canonical BSON round-trip tests ***/
|
||||
doc := bsonToNative(t, cB, "canonical", v.Description)
|
||||
|
||||
// native_to_bson(bson_to_native(cB)) = cB
|
||||
nativeToBSON(t, cB, doc, v.Description, "canonical", "bson_to_native(cB)")
|
||||
|
||||
// native_to_canonical_extended_json(bson_to_native(cB)) = cEJ
|
||||
nativeToJSON(t, cEJ, doc, v.Description, "canonical", "cEJ", "bson_to_native(cB)")
|
||||
|
||||
// native_to_relaxed_extended_json(bson_to_native(cB)) = rEJ (if rEJ exists)
|
||||
if v.RelaxedExtJSON != nil {
|
||||
rEJ := unescapeUnicode(string(pretty.Ugly([]byte(*v.RelaxedExtJSON))), test.BsonType)
|
||||
if test.BsonType == "0x01" {
|
||||
rEJ = normalizeRelaxedDouble(t, *test.TestKey, rEJ)
|
||||
}
|
||||
|
||||
nativeToJSON(t, rEJ, doc, v.Description, "relaxed", "rEJ", "bson_to_native(cB)")
|
||||
|
||||
/*** relaxed extended JSON round-trip tests (if exists) ***/
|
||||
doc, err = jsonToNative(rEJ, "relaxed", v.Description)
|
||||
require.NoError(t, err)
|
||||
|
||||
// native_to_relaxed_extended_json(json_to_native(rEJ)) = rEJ
|
||||
nativeToJSON(t, rEJ, doc, v.Description, "relaxed", "rEJ", "json_to_native(rEJ)")
|
||||
}
|
||||
|
||||
/*** canonical extended JSON round-trip tests ***/
|
||||
doc, err = jsonToNative(cEJ, "canonical", v.Description)
|
||||
require.NoError(t, err)
|
||||
|
||||
// native_to_canonical_extended_json(json_to_native(cEJ)) = cEJ
|
||||
nativeToJSON(t, cEJ, doc, v.Description, "canonical", "cEJ", "json_to_native(cEJ)")
|
||||
|
||||
// native_to_bson(json_to_native(cEJ)) = cb (unless lossy)
|
||||
if v.Lossy == nil || !*v.Lossy {
|
||||
nativeToBSON(t, cB, doc, v.Description, "canonical", "json_to_native(cEJ)")
|
||||
}
|
||||
|
||||
/*** degenerate BSON round-trip tests (if exists) ***/
|
||||
if v.DegenerateBSON != nil {
|
||||
dB, err := hex.DecodeString(*v.DegenerateBSON)
|
||||
expectNoError(t, err, fmt.Sprintf("%s: reading degenerate BSON", v.Description))
|
||||
|
||||
doc = bsonToNative(t, dB, "degenerate", v.Description)
|
||||
|
||||
// native_to_bson(bson_to_native(dB)) = cB
|
||||
nativeToBSON(t, cB, doc, v.Description, "degenerate", "bson_to_native(dB)")
|
||||
}
|
||||
|
||||
/*** degenerate JSON round-trip tests (if exists) ***/
|
||||
if v.DegenerateExtJSON != nil {
|
||||
dEJ := unescapeUnicode(string(pretty.Ugly([]byte(*v.DegenerateExtJSON))), test.BsonType)
|
||||
if test.BsonType == "0x01" {
|
||||
dEJ = normalizeCanonicalDouble(t, *test.TestKey, dEJ)
|
||||
}
|
||||
|
||||
doc, err = jsonToNative(dEJ, "degenerate canonical", v.Description)
|
||||
require.NoError(t, err)
|
||||
|
||||
// native_to_canonical_extended_json(json_to_native(dEJ)) = cEJ
|
||||
nativeToJSON(t, cEJ, doc, v.Description, "degenerate canonical", "cEJ", "json_to_native(dEJ)")
|
||||
|
||||
// native_to_bson(json_to_native(dEJ)) = cB (unless lossy)
|
||||
if v.Lossy == nil || !*v.Lossy {
|
||||
nativeToBSON(t, cB, doc, v.Description, "canonical", "json_to_native(dEJ)")
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("decode error", func(t *testing.T) {
|
||||
for _, d := range test.DecodeErrors {
|
||||
t.Run(d.Description, func(t *testing.T) {
|
||||
b, err := hex.DecodeString(d.Bson)
|
||||
expectNoError(t, err, d.Description)
|
||||
|
||||
var doc D
|
||||
err = Unmarshal(b, &doc)
|
||||
|
||||
// The driver unmarshals invalid UTF-8 strings without error. Loop over the unmarshalled elements
|
||||
// and assert that there was no error if any of the string or DBPointer values contain invalid UTF-8
|
||||
// characters.
|
||||
for _, elem := range doc {
|
||||
str, ok := elem.Value.(string)
|
||||
invalidString := ok && !utf8.ValidString(str)
|
||||
dbPtr, ok := elem.Value.(primitive.DBPointer)
|
||||
invalidDBPtr := ok && !utf8.ValidString(dbPtr.DB)
|
||||
|
||||
if invalidString || invalidDBPtr {
|
||||
expectNoError(t, err, d.Description)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
expectError(t, err, fmt.Sprintf("%s: expected decode error", d.Description))
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("parse error", func(t *testing.T) {
|
||||
for _, p := range test.ParseErrors {
|
||||
t.Run(p.Description, func(t *testing.T) {
|
||||
s := unescapeUnicode(p.String, test.BsonType)
|
||||
if test.BsonType == "0x13" {
|
||||
s = fmt.Sprintf(`{"decimal128": {"$numberDecimal": "%s"}}`, s)
|
||||
}
|
||||
|
||||
switch test.BsonType {
|
||||
case "0x00", "0x05", "0x13":
|
||||
var doc D
|
||||
err := UnmarshalExtJSON([]byte(s), true, &doc)
|
||||
// Null bytes are validated when marshaling to BSON
|
||||
if strings.Contains(p.Description, "Null") {
|
||||
_, err = Marshal(doc)
|
||||
}
|
||||
expectError(t, err, fmt.Sprintf("%s: expected parse error", p.Description))
|
||||
default:
|
||||
t.Errorf("Update test to check for parse errors for type %s", test.BsonType)
|
||||
t.Fail()
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func Test_BsonCorpus(t *testing.T) {
|
||||
jsonFiles, err := findJSONFilesInDir(dataDir)
|
||||
if err != nil {
|
||||
t.Fatalf("error finding JSON files in %s: %v", dataDir, err)
|
||||
}
|
||||
|
||||
for _, file := range jsonFiles {
|
||||
runTest(t, file)
|
||||
}
|
||||
}
|
||||
|
||||
func expectNoError(t *testing.T, err error, desc string) {
|
||||
if err != nil {
|
||||
t.Helper()
|
||||
t.Errorf("%s: Unepexted error: %v", desc, err)
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func expectError(t *testing.T, err error, desc string) {
|
||||
if err == nil {
|
||||
t.Helper()
|
||||
t.Errorf("%s: Expected error", desc)
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func TestRelaxedUUIDValidation(t *testing.T) {
|
||||
testCases := []struct {
|
||||
description string
|
||||
canonicalExtJSON string
|
||||
degenerateExtJSON string
|
||||
expectedErr string
|
||||
}{
|
||||
{
|
||||
"valid uuid",
|
||||
"{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"04\"}}}",
|
||||
"{\"x\" : { \"$uuid\" : \"73ffd264-44b3-4c69-90e8-e7d1dfc035d4\"}}",
|
||||
"",
|
||||
},
|
||||
{
|
||||
"invalid uuid--no hyphens",
|
||||
"",
|
||||
"{\"x\" : { \"$uuid\" : \"73ffd26444b34c6990e8e7d1dfc035d4\"}}",
|
||||
"$uuid value does not follow RFC 4122 format regarding length and hyphens",
|
||||
},
|
||||
{
|
||||
"invalid uuid--trailing hyphens",
|
||||
"",
|
||||
"{\"x\" : { \"$uuid\" : \"73ffd264-44b3-4c69-90e8-e7d1dfc035--\"}}",
|
||||
"$uuid value does not follow RFC 4122 format regarding length and hyphens",
|
||||
},
|
||||
{
|
||||
"invalid uuid--malformed hex",
|
||||
"",
|
||||
"{\"x\" : { \"$uuid\" : \"q3@fd26l-44b3-4c69-90e8-e7d1dfc035d4\"}}",
|
||||
"$uuid value does not follow RFC 4122 format regarding hex bytes: encoding/hex: invalid byte: U+0071 'q'",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.description, func(t *testing.T) {
|
||||
// get canonical extended JSON
|
||||
cEJ := unescapeUnicode(string(pretty.Ugly([]byte(tc.canonicalExtJSON))), "0x05")
|
||||
|
||||
// get degenerate extended JSON
|
||||
dEJ := unescapeUnicode(string(pretty.Ugly([]byte(tc.degenerateExtJSON))), "0x05")
|
||||
|
||||
// convert dEJ to native doc
|
||||
var doc D
|
||||
err := UnmarshalExtJSON([]byte(dEJ), true, &doc)
|
||||
|
||||
if tc.expectedErr != "" {
|
||||
assert.Equal(t, tc.expectedErr, err.Error(), "expected error %v, got %v", tc.expectedErr, err)
|
||||
} else {
|
||||
assert.Nil(t, err, "expected no error, got error: %v", err)
|
||||
|
||||
// Marshal doc into extended JSON and compare with cEJ
|
||||
nativeToJSON(t, cEJ, doc, tc.description, "degenerate canonical", "cEJ", "json_to_native(dEJ)")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
}
|
mongo/bson/bson_test.go (new file, 279 lines)
@@ -0,0 +1,279 @@
|
||||
// Copyright (C) MongoDB, Inc. 2017-present.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
package bson
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"go.mongodb.org/mongo-driver/bson/bsoncodec"
|
||||
"go.mongodb.org/mongo-driver/bson/bsonoptions"
|
||||
"go.mongodb.org/mongo-driver/bson/bsontype"
|
||||
"go.mongodb.org/mongo-driver/internal/testutil/assert"
|
||||
"go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
|
||||
)
|
||||
|
||||
func noerr(t *testing.T, err error) {
|
||||
if err != nil {
|
||||
t.Helper()
|
||||
t.Errorf("Unexpected error: (%T)%v", err, err)
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func TestTimeRoundTrip(t *testing.T) {
|
||||
val := struct {
|
||||
Value time.Time
|
||||
ID string
|
||||
}{
|
||||
ID: "time-rt-test",
|
||||
}
|
||||
|
||||
if !val.Value.IsZero() {
|
||||
t.Errorf("Did not get zero time as expected.")
|
||||
}
|
||||
|
||||
bsonOut, err := Marshal(val)
|
||||
noerr(t, err)
|
||||
rtval := struct {
|
||||
Value time.Time
|
||||
ID string
	}{}

	err = Unmarshal(bsonOut, &rtval)
	noerr(t, err)
	if !cmp.Equal(val, rtval) {
		t.Errorf("Did not round trip properly. got %v; want %v", val, rtval)
	}
	if !rtval.Value.IsZero() {
		t.Errorf("Did not get zero time as expected.")
	}
}

func TestNonNullTimeRoundTrip(t *testing.T) {
	now := time.Now()
	now = time.Unix(now.Unix(), 0)
	val := struct {
		Value time.Time
		ID    string
	}{
		ID:    "time-rt-test",
		Value: now,
	}

	bsonOut, err := Marshal(val)
	noerr(t, err)
	rtval := struct {
		Value time.Time
		ID    string
	}{}

	err = Unmarshal(bsonOut, &rtval)
	noerr(t, err)
	if !cmp.Equal(val, rtval) {
		t.Errorf("Did not round trip properly. got %v; want %v", val, rtval)
	}
}

func TestD(t *testing.T) {
	t.Run("can marshal", func(t *testing.T) {
		d := D{{"foo", "bar"}, {"hello", "world"}, {"pi", 3.14159}}
		idx, want := bsoncore.AppendDocumentStart(nil)
		want = bsoncore.AppendStringElement(want, "foo", "bar")
		want = bsoncore.AppendStringElement(want, "hello", "world")
		want = bsoncore.AppendDoubleElement(want, "pi", 3.14159)
		want, err := bsoncore.AppendDocumentEnd(want, idx)
		noerr(t, err)
		got, err := Marshal(d)
		noerr(t, err)
		if !bytes.Equal(got, want) {
			t.Errorf("Marshaled documents do not match. got %v; want %v", Raw(got), Raw(want))
		}
	})
	t.Run("can unmarshal", func(t *testing.T) {
		want := D{{"foo", "bar"}, {"hello", "world"}, {"pi", 3.14159}}
		idx, doc := bsoncore.AppendDocumentStart(nil)
		doc = bsoncore.AppendStringElement(doc, "foo", "bar")
		doc = bsoncore.AppendStringElement(doc, "hello", "world")
		doc = bsoncore.AppendDoubleElement(doc, "pi", 3.14159)
		doc, err := bsoncore.AppendDocumentEnd(doc, idx)
		noerr(t, err)
		var got D
		err = Unmarshal(doc, &got)
		noerr(t, err)
		if !cmp.Equal(got, want) {
			t.Errorf("Unmarshaled documents do not match. got %v; want %v", got, want)
		}
	})
}

type stringerString string

func (ss stringerString) String() string {
	return "bar"
}

type keyBool bool

func (kb keyBool) MarshalKey() (string, error) {
	return fmt.Sprintf("%v", kb), nil
}

func (kb *keyBool) UnmarshalKey(key string) error {
	switch key {
	case "true":
		*kb = true
	case "false":
		*kb = false
	default:
		return fmt.Errorf("invalid bool value %v", key)
	}
	return nil
}

type keyStruct struct {
	val int64
}

func (k keyStruct) MarshalText() (text []byte, err error) {
	str := strconv.FormatInt(k.val, 10)

	return []byte(str), nil
}

func (k *keyStruct) UnmarshalText(text []byte) error {
	val, err := strconv.ParseInt(string(text), 10, 64)
	if err != nil {
		return err
	}

	*k = keyStruct{
		val: val,
	}

	return nil
}

func TestMapCodec(t *testing.T) {
	t.Run("EncodeKeysWithStringer", func(t *testing.T) {
		strstr := stringerString("foo")
		mapObj := map[stringerString]int{strstr: 1}
		testCases := []struct {
			name string
			opts *bsonoptions.MapCodecOptions
			key  string
		}{
			{"default", bsonoptions.MapCodec(), "foo"},
			{"true", bsonoptions.MapCodec().SetEncodeKeysWithStringer(true), "bar"},
			{"false", bsonoptions.MapCodec().SetEncodeKeysWithStringer(false), "foo"},
		}
		for _, tc := range testCases {
			t.Run(tc.name, func(t *testing.T) {
				mapCodec := bsoncodec.NewMapCodec(tc.opts)
				mapRegistry := NewRegistryBuilder().RegisterDefaultEncoder(reflect.Map, mapCodec).Build()
				val, err := MarshalWithRegistry(mapRegistry, mapObj)
				assert.Nil(t, err, "Marshal error: %v", err)
				assert.True(t, strings.Contains(string(val), tc.key), "expected result to contain %v, got: %v", tc.key, string(val))
			})
		}
	})

	t.Run("keys implements keyMarshaler and keyUnmarshaler", func(t *testing.T) {
		mapObj := map[keyBool]int{keyBool(true): 1}

		doc, err := Marshal(mapObj)
		assert.Nil(t, err, "Marshal error: %v", err)
		idx, want := bsoncore.AppendDocumentStart(nil)
		want = bsoncore.AppendInt32Element(want, "true", 1)
		want, _ = bsoncore.AppendDocumentEnd(want, idx)
		assert.Equal(t, want, doc, "expected result %v, got %v", string(want), string(doc))

		var got map[keyBool]int
		err = Unmarshal(doc, &got)
		assert.Nil(t, err, "Unmarshal error: %v", err)
		assert.Equal(t, mapObj, got, "expected result %v, got %v", mapObj, got)

	})

	t.Run("keys implements encoding.TextMarshaler and encoding.TextUnmarshaler", func(t *testing.T) {
		mapObj := map[keyStruct]int{
			{val: 10}: 100,
		}

		doc, err := Marshal(mapObj)
		assert.Nil(t, err, "Marshal error: %v", err)
		idx, want := bsoncore.AppendDocumentStart(nil)
		want = bsoncore.AppendInt32Element(want, "10", 100)
		want, _ = bsoncore.AppendDocumentEnd(want, idx)
		assert.Equal(t, want, doc, "expected result %v, got %v", string(want), string(doc))

		var got map[keyStruct]int
		err = Unmarshal(doc, &got)
		assert.Nil(t, err, "Unmarshal error: %v", err)
		assert.Equal(t, mapObj, got, "expected result %v, got %v", mapObj, got)

	})
}

func TestExtJSONEscapeKey(t *testing.T) {
	doc := D{{Key: "\\usb#", Value: int32(1)}}
	b, err := MarshalExtJSON(&doc, false, false)
	noerr(t, err)

	want := "{\"\\\\usb#\":1}"
	if diff := cmp.Diff(want, string(b)); diff != "" {
		t.Errorf("Marshaled documents do not match. got %v, want %v", string(b), want)
	}

	var got D
	err = UnmarshalExtJSON(b, false, &got)
	noerr(t, err)
	if !cmp.Equal(got, doc) {
		t.Errorf("Unmarshaled documents do not match. got %v; want %v", got, doc)
	}
}

func TestBsoncoreArray(t *testing.T) {
	type BSONDocumentArray struct {
		Array []D `bson:"array"`
	}

	type BSONArray struct {
		Array bsoncore.Array `bson:"array"`
	}

	bda := BSONDocumentArray{
		Array: []D{
			{{"x", 1}},
			{{"x", 2}},
			{{"x", 3}},
		},
	}

	expectedBSON, err := Marshal(bda)
	assert.Nil(t, err, "Marshal bsoncore.Document array error: %v", err)

	var ba BSONArray
	err = Unmarshal(expectedBSON, &ba)
	assert.Nil(t, err, "Unmarshal error: %v", err)

	actualBSON, err := Marshal(ba)
	assert.Nil(t, err, "Marshal bsoncore.Array error: %v", err)

	assert.Equal(t, expectedBSON, actualBSON,
		"expected BSON to be %v after Marshalling again; got %v", expectedBSON, actualBSON)

	doc := bsoncore.Document(actualBSON)
	v := doc.Lookup("array")
	assert.Equal(t, bsontype.Array, v.Type, "expected type array, got %v", v.Type)
}
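A side note on the `MarshalExtJSON(&doc, false, false)` call in TestExtJSONEscapeKey above: the two booleans select canonical extended JSON and HTML escaping, which is easy to miss when reading the test. The sketch below shows the difference from a caller's point of view; it assumes the upstream go.mongodb.org/mongo-driver/bson import path and is not part of this changeset.

package main

import (
    "fmt"

    "go.mongodb.org/mongo-driver/bson"
)

func main() {
    doc := bson.D{{Key: "greeting", Value: "hello"}, {Key: "n", Value: int32(1)}}

    // Relaxed extended JSON: plain JSON-looking values, roughly {"greeting":"hello","n":1}.
    relaxed, _ := bson.MarshalExtJSON(doc, false, false)

    // Canonical extended JSON: type-preserving wrappers, roughly {"n":{"$numberInt":"1"}}.
    canonical, _ := bson.MarshalExtJSON(doc, true, false)

    fmt.Println(string(relaxed))
    fmt.Println(string(canonical))
}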
50  mongo/bson/bsoncodec/array_codec.go  (Normal file)
@@ -0,0 +1,50 @@
// Copyright (C) MongoDB, Inc. 2017-present.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

package bsoncodec

import (
    "reflect"

    "go.mongodb.org/mongo-driver/bson/bsonrw"
    "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
)

// ArrayCodec is the Codec used for bsoncore.Array values.
type ArrayCodec struct{}

var defaultArrayCodec = NewArrayCodec()

// NewArrayCodec returns an ArrayCodec.
func NewArrayCodec() *ArrayCodec {
    return &ArrayCodec{}
}

// EncodeValue is the ValueEncoder for bsoncore.Array values.
func (ac *ArrayCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
    if !val.IsValid() || val.Type() != tCoreArray {
        return ValueEncoderError{Name: "CoreArrayEncodeValue", Types: []reflect.Type{tCoreArray}, Received: val}
    }

    arr := val.Interface().(bsoncore.Array)
    return bsonrw.Copier{}.CopyArrayFromBytes(vw, arr)
}

// DecodeValue is the ValueDecoder for bsoncore.Array values.
func (ac *ArrayCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
    if !val.CanSet() || val.Type() != tCoreArray {
        return ValueDecoderError{Name: "CoreArrayDecodeValue", Types: []reflect.Type{tCoreArray}, Received: val}
    }

    if val.IsNil() {
        val.Set(reflect.MakeSlice(val.Type(), 0, 0))
    }

    val.SetLen(0)
    arr, err := bsonrw.Copier{}.AppendArrayBytes(val.Interface().(bsoncore.Array), vr)
    val.Set(reflect.ValueOf(arr))
    return err
}
238  mongo/bson/bsoncodec/bsoncodec.go  (Normal file)
@@ -0,0 +1,238 @@
// Copyright (C) MongoDB, Inc. 2017-present.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

package bsoncodec // import "go.mongodb.org/mongo-driver/bson/bsoncodec"

import (
    "fmt"
    "reflect"
    "strings"

    "go.mongodb.org/mongo-driver/bson/bsonrw"
    "go.mongodb.org/mongo-driver/bson/bsontype"
    "go.mongodb.org/mongo-driver/bson/primitive"
)

var (
    emptyValue = reflect.Value{}
)

// Marshaler is an interface implemented by types that can marshal themselves
// into a BSON document represented as bytes. The bytes returned must be a valid
// BSON document if the error is nil.
type Marshaler interface {
    MarshalBSON() ([]byte, error)
}

// ValueMarshaler is an interface implemented by types that can marshal
// themselves into a BSON value as bytes. The type must be the valid type for
// the bytes returned. The bytes and byte type together must be valid if the
// error is nil.
type ValueMarshaler interface {
    MarshalBSONValue() (bsontype.Type, []byte, error)
}

// Unmarshaler is an interface implemented by types that can unmarshal a BSON
// document representation of themselves. The BSON bytes can be assumed to be
// valid. UnmarshalBSON must copy the BSON bytes if it wishes to retain the data
// after returning.
type Unmarshaler interface {
    UnmarshalBSON([]byte) error
}

// ValueUnmarshaler is an interface implemented by types that can unmarshal a
// BSON value representation of themselves. The BSON bytes and type can be
// assumed to be valid. UnmarshalBSONValue must copy the BSON value bytes if it
// wishes to retain the data after returning.
type ValueUnmarshaler interface {
    UnmarshalBSONValue(bsontype.Type, []byte) error
}

// ValueEncoderError is an error returned from a ValueEncoder when the provided value can't be
// encoded by the ValueEncoder.
type ValueEncoderError struct {
    Name     string
    Types    []reflect.Type
    Kinds    []reflect.Kind
    Received reflect.Value
}

func (vee ValueEncoderError) Error() string {
    typeKinds := make([]string, 0, len(vee.Types)+len(vee.Kinds))
    for _, t := range vee.Types {
        typeKinds = append(typeKinds, t.String())
    }
    for _, k := range vee.Kinds {
        if k == reflect.Map {
            typeKinds = append(typeKinds, "map[string]*")
            continue
        }
        typeKinds = append(typeKinds, k.String())
    }
    received := vee.Received.Kind().String()
    if vee.Received.IsValid() {
        received = vee.Received.Type().String()
    }
    return fmt.Sprintf("%s can only encode valid %s, but got %s", vee.Name, strings.Join(typeKinds, ", "), received)
}

// ValueDecoderError is an error returned from a ValueDecoder when the provided value can't be
// decoded by the ValueDecoder.
type ValueDecoderError struct {
    Name     string
    Types    []reflect.Type
    Kinds    []reflect.Kind
    Received reflect.Value
}

func (vde ValueDecoderError) Error() string {
    typeKinds := make([]string, 0, len(vde.Types)+len(vde.Kinds))
    for _, t := range vde.Types {
        typeKinds = append(typeKinds, t.String())
    }
    for _, k := range vde.Kinds {
        if k == reflect.Map {
            typeKinds = append(typeKinds, "map[string]*")
            continue
        }
        typeKinds = append(typeKinds, k.String())
    }
    received := vde.Received.Kind().String()
    if vde.Received.IsValid() {
        received = vde.Received.Type().String()
    }
    return fmt.Sprintf("%s can only decode valid and settable %s, but got %s", vde.Name, strings.Join(typeKinds, ", "), received)
}

// EncodeContext is the contextual information required for a Codec to encode a
// value.
type EncodeContext struct {
    *Registry
    MinSize bool
}

// DecodeContext is the contextual information required for a Codec to decode a
// value.
type DecodeContext struct {
    *Registry
    Truncate bool

    // Ancestor is the type of a containing document. This is mainly used to determine what type
    // should be used when decoding an embedded document into an empty interface. For example, if
    // Ancestor is a bson.M, BSON embedded document values being decoded into an empty interface
    // will be decoded into a bson.M.
    //
    // Deprecated: Use DefaultDocumentM or DefaultDocumentD instead.
    Ancestor reflect.Type

    // defaultDocumentType specifies the Go type to decode top-level and nested BSON documents into. In particular, the
    // usage for this field is restricted to data typed as "interface{}" or "map[string]interface{}". If DocumentType is
    // set to a type that a BSON document cannot be unmarshaled into (e.g. "string"), unmarshalling will result in an
    // error. DocumentType overrides the Ancestor field.
    defaultDocumentType reflect.Type
}

// DefaultDocumentM will decode empty documents using the primitive.M type. This behavior is restricted to data typed as
// "interface{}" or "map[string]interface{}".
func (dc *DecodeContext) DefaultDocumentM() {
    dc.defaultDocumentType = reflect.TypeOf(primitive.M{})
}

// DefaultDocumentD will decode empty documents using the primitive.D type. This behavior is restricted to data typed as
// "interface{}" or "map[string]interface{}".
func (dc *DecodeContext) DefaultDocumentD() {
    dc.defaultDocumentType = reflect.TypeOf(primitive.D{})
}

// ValueCodec is the interface that groups the methods to encode and decode
// values.
type ValueCodec interface {
    ValueEncoder
    ValueDecoder
}

// ValueEncoder is the interface implemented by types that can handle the encoding of a value.
type ValueEncoder interface {
    EncodeValue(EncodeContext, bsonrw.ValueWriter, reflect.Value) error
}

// ValueEncoderFunc is an adapter function that allows a function with the correct signature to be
// used as a ValueEncoder.
type ValueEncoderFunc func(EncodeContext, bsonrw.ValueWriter, reflect.Value) error

// EncodeValue implements the ValueEncoder interface.
func (fn ValueEncoderFunc) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
    return fn(ec, vw, val)
}

// ValueDecoder is the interface implemented by types that can handle the decoding of a value.
type ValueDecoder interface {
    DecodeValue(DecodeContext, bsonrw.ValueReader, reflect.Value) error
}

// ValueDecoderFunc is an adapter function that allows a function with the correct signature to be
// used as a ValueDecoder.
type ValueDecoderFunc func(DecodeContext, bsonrw.ValueReader, reflect.Value) error

// DecodeValue implements the ValueDecoder interface.
func (fn ValueDecoderFunc) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
    return fn(dc, vr, val)
}

// typeDecoder is the interface implemented by types that can handle the decoding of a value given its type.
type typeDecoder interface {
    decodeType(DecodeContext, bsonrw.ValueReader, reflect.Type) (reflect.Value, error)
}

// typeDecoderFunc is an adapter function that allows a function with the correct signature to be used as a typeDecoder.
type typeDecoderFunc func(DecodeContext, bsonrw.ValueReader, reflect.Type) (reflect.Value, error)

func (fn typeDecoderFunc) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
    return fn(dc, vr, t)
}

// decodeAdapter allows two functions with the correct signatures to be used as both a ValueDecoder and typeDecoder.
type decodeAdapter struct {
    ValueDecoderFunc
    typeDecoderFunc
}

var _ ValueDecoder = decodeAdapter{}
var _ typeDecoder = decodeAdapter{}

// decodeTypeOrValue calls decoder.decodeType if decoder is a typeDecoder. Otherwise, it allocates a new element of type
// t and calls decoder.DecodeValue on it.
func decodeTypeOrValue(decoder ValueDecoder, dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
    td, _ := decoder.(typeDecoder)
    return decodeTypeOrValueWithInfo(decoder, td, dc, vr, t, true)
}

func decodeTypeOrValueWithInfo(vd ValueDecoder, td typeDecoder, dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type, convert bool) (reflect.Value, error) {
    if td != nil {
        val, err := td.decodeType(dc, vr, t)
        if err == nil && convert && val.Type() != t {
            // This conversion step is necessary for slices and maps. If a user declares variables like:
            //
            // type myBool bool
            // var m map[string]myBool
            //
            // and tries to decode BSON bytes into the map, the decoding will fail if this conversion is not present
            // because we'll try to assign a value of type bool to one of type myBool.
            val = val.Convert(t)
        }
        return val, err
    }

    val := reflect.New(t).Elem()
    err := vd.DecodeValue(dc, vr, val)
    return val, err
}

// CodecZeroer is the interface implemented by Codecs that can also determine if
// a value of the type that would be encoded is zero.
type CodecZeroer interface {
    IsTypeZero(interface{}) bool
}
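The Marshaler and Unmarshaler interfaces documented above are the hooks user-defined types implement to control their own BSON form. As a rough usage sketch only (the Temperature type and the upstream go.mongodb.org/mongo-driver/bson import are illustrative assumptions, not part of this diff):

package main

import (
    "fmt"

    "go.mongodb.org/mongo-driver/bson"
)

// Temperature is a hypothetical user type that stores degrees Celsius but is
// persisted as a small document with an explicit unit.
type Temperature struct {
    Celsius float64
}

// temperatureDoc is the on-the-wire shape used by the custom (un)marshalers.
type temperatureDoc struct {
    Value float64 `bson:"value"`
    Unit  string  `bson:"unit"`
}

// MarshalBSON satisfies the Marshaler interface: it must return a complete,
// valid BSON document.
func (t Temperature) MarshalBSON() ([]byte, error) {
    return bson.Marshal(temperatureDoc{Value: t.Celsius, Unit: "C"})
}

// UnmarshalBSON satisfies the Unmarshaler interface; the driver hands it the
// raw document bytes for the value being decoded.
func (t *Temperature) UnmarshalBSON(data []byte) error {
    var doc temperatureDoc
    if err := bson.Unmarshal(data, &doc); err != nil {
        return err
    }
    t.Celsius = doc.Value
    return nil
}

func main() {
    raw, _ := bson.Marshal(struct{ Reading Temperature }{Reading: Temperature{Celsius: 21.5}})

    var back struct{ Reading Temperature }
    _ = bson.Unmarshal(raw, &back)
    fmt.Printf("%+v\n", back) // {Reading:{Celsius:21.5}}
}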
143  mongo/bson/bsoncodec/bsoncodec_test.go  (Normal file)
@@ -0,0 +1,143 @@
// Copyright (C) MongoDB, Inc. 2017-present.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

package bsoncodec

import (
    "fmt"
    "reflect"
    "testing"
    "time"

    "go.mongodb.org/mongo-driver/bson/bsonrw"
    "go.mongodb.org/mongo-driver/bson/bsontype"
    "go.mongodb.org/mongo-driver/bson/primitive"
)

func ExampleValueEncoder() {
    var _ ValueEncoderFunc = func(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
        if val.Kind() != reflect.String {
            return ValueEncoderError{Name: "StringEncodeValue", Kinds: []reflect.Kind{reflect.String}, Received: val}
        }

        return vw.WriteString(val.String())
    }
}

func ExampleValueDecoder() {
    var _ ValueDecoderFunc = func(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
        if !val.CanSet() || val.Kind() != reflect.String {
            return ValueDecoderError{Name: "StringDecodeValue", Kinds: []reflect.Kind{reflect.String}, Received: val}
        }

        if vr.Type() != bsontype.String {
            return fmt.Errorf("cannot decode %v into a string type", vr.Type())
        }

        str, err := vr.ReadString()
        if err != nil {
            return err
        }
        val.SetString(str)
        return nil
    }
}

func noerr(t *testing.T, err error) {
    if err != nil {
        t.Helper()
        t.Errorf("Unexpected error: (%T)%v", err, err)
        t.FailNow()
    }
}

func compareTime(t1, t2 time.Time) bool {
    if t1.Location() != t2.Location() {
        return false
    }
    return t1.Equal(t2)
}

func compareErrors(err1, err2 error) bool {
    if err1 == nil && err2 == nil {
        return true
    }

    if err1 == nil || err2 == nil {
        return false
    }

    if err1.Error() != err2.Error() {
        return false
    }

    return true
}

func compareDecimal128(d1, d2 primitive.Decimal128) bool {
    d1H, d1L := d1.GetBytes()
    d2H, d2L := d2.GetBytes()

    if d1H != d2H {
        return false
    }

    if d1L != d2L {
        return false
    }

    return true
}

type noPrivateFields struct {
    a string
}

func compareNoPrivateFields(npf1, npf2 noPrivateFields) bool {
    return npf1.a != npf2.a // We don't want these to be equal
}

type zeroTest struct {
    reportZero bool
}

func (z zeroTest) IsZero() bool { return z.reportZero }

func compareZeroTest(_, _ zeroTest) bool { return true }

type nonZeroer struct {
    value bool
}

type llCodec struct {
    t         *testing.T
    decodeval interface{}
    encodeval interface{}
    err       error
}

func (llc *llCodec) EncodeValue(_ EncodeContext, _ bsonrw.ValueWriter, i interface{}) error {
    if llc.err != nil {
        return llc.err
    }

    llc.encodeval = i
    return nil
}

func (llc *llCodec) DecodeValue(_ DecodeContext, _ bsonrw.ValueReader, val reflect.Value) error {
    if llc.err != nil {
        return llc.err
    }

    if !reflect.TypeOf(llc.decodeval).AssignableTo(val.Type()) {
        llc.t.Errorf("decodeval must be assignable to val provided to DecodeValue, but is not. decodeval %T; val %T", llc.decodeval, val)
        return nil
    }

    val.Set(reflect.ValueOf(llc.decodeval))
    return nil
}
111  mongo/bson/bsoncodec/byte_slice_codec.go  (Normal file)
@@ -0,0 +1,111 @@
// Copyright (C) MongoDB, Inc. 2017-present.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

package bsoncodec

import (
    "fmt"
    "reflect"

    "go.mongodb.org/mongo-driver/bson/bsonoptions"
    "go.mongodb.org/mongo-driver/bson/bsonrw"
    "go.mongodb.org/mongo-driver/bson/bsontype"
)

// ByteSliceCodec is the Codec used for []byte values.
type ByteSliceCodec struct {
    EncodeNilAsEmpty bool
}

var (
    defaultByteSliceCodec = NewByteSliceCodec()

    _ ValueCodec  = defaultByteSliceCodec
    _ typeDecoder = defaultByteSliceCodec
)

// NewByteSliceCodec returns a ByteSliceCodec with options opts.
func NewByteSliceCodec(opts ...*bsonoptions.ByteSliceCodecOptions) *ByteSliceCodec {
    byteSliceOpt := bsonoptions.MergeByteSliceCodecOptions(opts...)
    codec := ByteSliceCodec{}
    if byteSliceOpt.EncodeNilAsEmpty != nil {
        codec.EncodeNilAsEmpty = *byteSliceOpt.EncodeNilAsEmpty
    }
    return &codec
}

// EncodeValue is the ValueEncoder for []byte.
func (bsc *ByteSliceCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
    if !val.IsValid() || val.Type() != tByteSlice {
        return ValueEncoderError{Name: "ByteSliceEncodeValue", Types: []reflect.Type{tByteSlice}, Received: val}
    }
    if val.IsNil() && !bsc.EncodeNilAsEmpty {
        return vw.WriteNull()
    }
    return vw.WriteBinary(val.Interface().([]byte))
}

func (bsc *ByteSliceCodec) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
    if t != tByteSlice {
        return emptyValue, ValueDecoderError{
            Name:     "ByteSliceDecodeValue",
            Types:    []reflect.Type{tByteSlice},
            Received: reflect.Zero(t),
        }
    }

    var data []byte
    var err error
    switch vrType := vr.Type(); vrType {
    case bsontype.String:
        str, err := vr.ReadString()
        if err != nil {
            return emptyValue, err
        }
        data = []byte(str)
    case bsontype.Symbol:
        sym, err := vr.ReadSymbol()
        if err != nil {
            return emptyValue, err
        }
        data = []byte(sym)
    case bsontype.Binary:
        var subtype byte
        data, subtype, err = vr.ReadBinary()
        if err != nil {
            return emptyValue, err
        }
        if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld {
            return emptyValue, decodeBinaryError{subtype: subtype, typeName: "[]byte"}
        }
    case bsontype.Null:
        err = vr.ReadNull()
    case bsontype.Undefined:
        err = vr.ReadUndefined()
    default:
        return emptyValue, fmt.Errorf("cannot decode %v into a []byte", vrType)
    }
    if err != nil {
        return emptyValue, err
    }

    return reflect.ValueOf(data), nil
}

// DecodeValue is the ValueDecoder for []byte.
func (bsc *ByteSliceCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
    if !val.CanSet() || val.Type() != tByteSlice {
        return ValueDecoderError{Name: "ByteSliceDecodeValue", Types: []reflect.Type{tByteSlice}, Received: val}
    }

    elem, err := bsc.decodeType(dc, vr, tByteSlice)
    if err != nil {
        return err
    }

    val.Set(elem)
    return nil
}
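The EncodeNilAsEmpty flag above changes how a nil []byte is written: an empty binary value instead of BSON null. A hedged sketch of wiring this codec into a custom registry from the caller's side, assuming the upstream bson, bsoncodec and bsonoptions packages (reg, codec and the anonymous struct are illustrative, not part of this changeset):

package main

import (
    "fmt"
    "reflect"

    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/bson/bsoncodec"
    "go.mongodb.org/mongo-driver/bson/bsonoptions"
)

func main() {
    // Ask the codec to write nil []byte values as an empty binary value
    // instead of BSON null.
    codec := bsoncodec.NewByteSliceCodec(
        bsonoptions.ByteSliceCodec().SetEncodeNilAsEmpty(true),
    )

    // Override the default []byte handling in a registry of our own.
    reg := bson.NewRegistryBuilder().
        RegisterTypeEncoder(reflect.TypeOf([]byte(nil)), codec).
        Build()

    doc, err := bson.MarshalWithRegistry(reg, struct{ Data []byte }{Data: nil})
    if err != nil {
        panic(err)
    }
    fmt.Println(bson.Raw(doc)) // "data" comes out as an empty binary value, not null
}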
63  mongo/bson/bsoncodec/cond_addr_codec.go  (Normal file)
@@ -0,0 +1,63 @@
// Copyright (C) MongoDB, Inc. 2017-present.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

package bsoncodec

import (
    "reflect"

    "go.mongodb.org/mongo-driver/bson/bsonrw"
)

// condAddrEncoder is the encoder used when a pointer to the encoding value has an encoder.
type condAddrEncoder struct {
    canAddrEnc ValueEncoder
    elseEnc    ValueEncoder
}

var _ ValueEncoder = (*condAddrEncoder)(nil)

// newCondAddrEncoder returns a condAddrEncoder.
func newCondAddrEncoder(canAddrEnc, elseEnc ValueEncoder) *condAddrEncoder {
    encoder := condAddrEncoder{canAddrEnc: canAddrEnc, elseEnc: elseEnc}
    return &encoder
}

// EncodeValue is the ValueEncoderFunc for a value that may be addressable.
func (cae *condAddrEncoder) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
    if val.CanAddr() {
        return cae.canAddrEnc.EncodeValue(ec, vw, val)
    }
    if cae.elseEnc != nil {
        return cae.elseEnc.EncodeValue(ec, vw, val)
    }
    return ErrNoEncoder{Type: val.Type()}
}

// condAddrDecoder is the decoder used when a pointer to the value has a decoder.
type condAddrDecoder struct {
    canAddrDec ValueDecoder
    elseDec    ValueDecoder
}

var _ ValueDecoder = (*condAddrDecoder)(nil)

// newCondAddrDecoder returns a condAddrDecoder.
func newCondAddrDecoder(canAddrDec, elseDec ValueDecoder) *condAddrDecoder {
    decoder := condAddrDecoder{canAddrDec: canAddrDec, elseDec: elseDec}
    return &decoder
}

// DecodeValue is the ValueDecoderFunc for a value that may be addressable.
func (cad *condAddrDecoder) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
    if val.CanAddr() {
        return cad.canAddrDec.DecodeValue(dc, vr, val)
    }
    if cad.elseDec != nil {
        return cad.elseDec.DecodeValue(dc, vr, val)
    }
    return ErrNoDecoder{Type: val.Type()}
}
97  mongo/bson/bsoncodec/cond_addr_codec_test.go  (Normal file)
@@ -0,0 +1,97 @@
// Copyright (C) MongoDB, Inc. 2017-present.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

package bsoncodec

import (
    "reflect"
    "testing"

    "go.mongodb.org/mongo-driver/bson/bsonrw"
    "go.mongodb.org/mongo-driver/bson/bsonrw/bsonrwtest"
    "go.mongodb.org/mongo-driver/internal/testutil/assert"
)

func TestCondAddrCodec(t *testing.T) {
    var inner int
    canAddrVal := reflect.ValueOf(&inner)
    addressable := canAddrVal.Elem()
    unaddressable := reflect.ValueOf(inner)
    rw := &bsonrwtest.ValueReaderWriter{}

    t.Run("addressEncode", func(t *testing.T) {
        invoked := 0
        encode1 := ValueEncoderFunc(func(EncodeContext, bsonrw.ValueWriter, reflect.Value) error {
            invoked = 1
            return nil
        })
        encode2 := ValueEncoderFunc(func(EncodeContext, bsonrw.ValueWriter, reflect.Value) error {
            invoked = 2
            return nil
        })
        condEncoder := newCondAddrEncoder(encode1, encode2)

        testCases := []struct {
            name    string
            val     reflect.Value
            invoked int
        }{
            {"canAddr", addressable, 1},
            {"else", unaddressable, 2},
        }
        for _, tc := range testCases {
            t.Run(tc.name, func(t *testing.T) {
                err := condEncoder.EncodeValue(EncodeContext{}, rw, tc.val)
                assert.Nil(t, err, "CondAddrEncoder error: %v", err)

                assert.Equal(t, invoked, tc.invoked, "Expected function %v to be called, called %v", tc.invoked, invoked)
            })
        }

        t.Run("error", func(t *testing.T) {
            errEncoder := newCondAddrEncoder(encode1, nil)
            err := errEncoder.EncodeValue(EncodeContext{}, rw, unaddressable)
            want := ErrNoEncoder{Type: unaddressable.Type()}
            assert.Equal(t, err, want, "expected error %v, got %v", want, err)
        })
    })
    t.Run("addressDecode", func(t *testing.T) {
        invoked := 0
        decode1 := ValueDecoderFunc(func(DecodeContext, bsonrw.ValueReader, reflect.Value) error {
            invoked = 1
            return nil
        })
        decode2 := ValueDecoderFunc(func(DecodeContext, bsonrw.ValueReader, reflect.Value) error {
            invoked = 2
            return nil
        })
        condDecoder := newCondAddrDecoder(decode1, decode2)

        testCases := []struct {
            name    string
            val     reflect.Value
            invoked int
        }{
            {"canAddr", addressable, 1},
            {"else", unaddressable, 2},
        }
        for _, tc := range testCases {
            t.Run(tc.name, func(t *testing.T) {
                err := condDecoder.DecodeValue(DecodeContext{}, rw, tc.val)
                assert.Nil(t, err, "CondAddrDecoder error: %v", err)

                assert.Equal(t, invoked, tc.invoked, "Expected function %v to be called, called %v", tc.invoked, invoked)
            })
        }

        t.Run("error", func(t *testing.T) {
            errDecoder := newCondAddrDecoder(decode1, nil)
            err := errDecoder.DecodeValue(DecodeContext{}, rw, unaddressable)
            want := ErrNoDecoder{Type: unaddressable.Type()}
            assert.Equal(t, err, want, "expected error %v, got %v", want, err)
        })
    })
}
1729  mongo/bson/bsoncodec/default_value_decoders.go  (Normal file)
File diff suppressed because it is too large.

Some files were not shown because too many files have changed in this diff.