Compare commits

245 Commits
feature/mo ... v0.0.391

| SHA1 |
|---|
| 4b55dbaacf |
| c399fa42ae |
| 9e586f7706 |
| 3cc8dccc63 |
| 7fedfbca81 |
| 3c439ba428 |
| ad24f6db44 |
| 1869ff3d75 |
| 30ce8c4b60 |
| 885bb53244 |
| 1c7dc1820a |
| 7e16e799e4 |
| 890e16241d |
| b9d0348735 |
| b9e9575b9b |
| 295a098eb4 |
| b69a082bb1 |
| a4a8c83d17 |
| e952176bb0 |
| d99adb203b |
| f1f91f4cfa |
| 2afb265ea4 |
| be24f7a190 |
| aae8a706e9 |
| 7d64f18f54 |
| d08b2e565a |
| d29e84894d |
| 617298c366 |
| 668f308565 |
| 240a8ed7aa |
| 70de8e8d04 |
| d38fa60fbc |
| 5fba7e0e2f |
| 8757643399 |
| 42bd4cf58d |
| 413178e2d3 |
| 9264a2e99b |
| 2a0471fb3d |
| 1497c013f9 |
| ef78b7467b |
| 0eda32b725 |
| f9ccafb976 |
| 6e90239fef |
| 05580c384a |
| 3188b951fb |
| 6b211d1443 |
| b2b9b40792 |
| 2f915cb6c1 |
| b2b93f570a |
| 8247fc4524 |
| 5dad44ad09 |
| f042183433 |
| b0be93a7a0 |
| 1c143921e6 |
| 68e63a9cf6 |
| c3162fec95 |
| 1124aa781a |
| eef0e9f2aa |
| af38b06d22 |
| 2fad6340c7 |
| 03aa0a2282 |
| 358c238f3d |
| d65ac8ba2b |
| 55d02b8c65 |
| 8a3965f666 |
| 4aa2f494b1 |
| 8f13eb2f16 |
| 8f15d42173 |
| 07fa21dcca |
| e657de7f78 |
| c534e998e8 |
| 88642770c5 |
| 8528b5cb66 |
| 5ba84bd8ee |
| 1260b2dc77 |
| 7d18b913c6 |
| d1f9069f2f |
| fa6d73301e |
| bfe62799d3 |
| ede912eb7b |
| ff8f128fe8 |
| 1971f1396f |
| bf6c184d12 |
| 770f5c5c64 |
| 623c021689 |
| afcc89bf9e |
| 1672e8f8fd |
| 398ed56d32 |
| f3ecba3883 |
| 45031b05cf |
| 7413ea045d |
| 62c9a4e734 |
| 3a8baaa6d9 |
| 498785e213 |
| 678f95642c |
| dacc97e2ce |
| f8c0c0afa0 |
| 2fbd5cf965 |
| 75f71fe3db |
| ab1a1ab6f6 |
| 19ee5019ef |
| 42b68507f2 |
| 9d0047a11e |
| 06d81f1682 |
| 7b8ab03779 |
| 07cbcf5a0a |
| da41ec3e84 |
| 592fae25af |
| 7968460fa2 |
| b808c5727c |
| 796f7956b8 |
| 1e6b92d1d9 |
| 0b85fa5af9 |
| c3318cc1de |
| fbf4d7b915 |
| 9cc0abf9e0 |
| 7c40bcfd3c |
| 05636a1e4d |
| 0f52b860ea |
| b5cd116219 |
| 98486842ae |
| 7577a2dd47 |
| 08681756b6 |
| 64772d0474 |
| 127764556e |
| 170f43d806 |
| 9dffc41274 |
| c63cf442f8 |
| a2ba283632 |
| 4a1fb1ae18 |
| a127b24e62 |
| 69d6290376 |
| c08a739158 |
| 5f5f0e44f0 |
| 6e6797eac5 |
| cd9406900a |
| 6c81f7f6bc |
| d56a0235af |
| de2ca763c1 |
| da52bb5c90 |
| 3d4afe7b25 |
| f5766d639c |
| cdf2a6e76b |
| 6d7cfb86f8 |
| 1e9d663ffe |
| 5b8d7ebf87 |
| 11dc6d2640 |
| 29a3f73f15 |
| 98105642fc |
| 0fd5f3b417 |
| 43cac4b3bb |
| cd68af8e66 |
| 113d838876 |
| 9e5bc0d3ea |
| 6d3bd13f61 |
| b5ca475b3f |
| a75b1291cb |
| 21cd1ee066 |
| ae43cbb623 |
| 9b752a911c |
| ec9ac26a4c |
| 39a0b73d56 |
| 2e2e15d4d2 |
| 0d16946aba |
| 14441c2378 |
| f6bcdc9903 |
| a95053211c |
| 813ce71e3e |
| 56ae0cfc6c |
| 202afc9068 |
| 56094b3cb6 |
| 0da098e9f9 |
| f0881c9fd6 |
| 029b408749 |
| 84b2be3169 |
| c872cecc67 |
| 99cd92729e |
| ac416f7b69 |
| e10140e143 |
| e165f0f62f |
| 655d4daad9 |
| 87a004e577 |
| 376c6cab50 |
| 4a3f25baa0 |
| aa33bc8df3 |
| 96b3718375 |
| 5f9b55933b |
| 74d42637e7 |
| 0c05bcf29b |
| 9136143f2f |
| 2f1b784dc2 |
| 190584e0e6 |
| b7003b9ec9 |
| 4f871271e8 |
| 91f4793678 |
| 3b30bb049e |
| f0c5b36ea9 |
| 647ec64c3b |
| b5f9b6b638 |
| c7949febf2 |
| 15a4b2a713 |
| 493c6ebae8 |
| fb847b03af |
| f826633e6e |
| edeae23bf1 |
| a038b86147 |
| ede0b99d3a |
| d04ce18eb0 |
| 8ae9a0f107 |
| a259bb6dbc |
| adf32568ee |
| 0cfa159cb1 |
| 0ead99608a |
| 7fe3e66cad |
| a73d7d1654 |
| bbd7a7bc2c |
| f5151eb214 |
| eefb9ac9f5 |
| 468a7d212d |
| a4def75d06 |
| 16c66ee28c |
| 2e6ca48d22 |
| b1d6509294 |
| e909d656d9 |
| 0971f60c30 |
| d8270e53ed |
| 1ee127937a |
| 56684b2c0b |
| 1ea6695f82 |
| 5273ff7600 |
| caa69c3629 |
| 0ff5f0aa28 |
| d5cb1e48ed |
| 1c2d3f541f |
| ec62ad436f |
| 8d0ef0f002 |
| d78550672e |
| 1d629f6db8 |
| f7d291056d |
| 710c257c64 |
| c320bb3d90 |
| 2f01a1d50f |
| ffc57b7e89 |
| d88cd3c22b |
| ac5ad640bd |
55  .gitea/workflows/tests.yml  (new file)
@@ -0,0 +1,55 @@
# https://docs.gitea.com/next/usage/actions/quickstart
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions
# https://docs.github.com/en/actions/learn-github-actions/contexts#github-context

name: Build Docker and Deploy
run-name: Build & Deploy ${{ gitea.ref }} on ${{ gitea.actor }}

on:
  push:
    branches:
      - '*'
      - '**'


jobs:
  run_tests:
    name: Run goext test-suite
    runs-on: bfb-cicd-latest
    steps:

      - name: Check out code
        uses: actions/checkout@v3

      - name: Setup go
        uses: actions/setup-go@v4
        with:
          go-version-file: '${{ gitea.workspace }}/go.mod'

      - name: Setup packages
        uses: awalsh128/cache-apt-pkgs-action@latest
        with:
          packages: curl python3
          version: 1.0

      - name: go version
        run: go version

      - name: Run tests
        run: cd "${{ gitea.workspace }}" && make test

      - name: Send failure mail
        if: failure()
        uses: dawidd6/action-send-mail@v3
        with:
          server_address: smtp.fastmail.com
          server_port: 465
          secure: true
          username: ${{secrets.MAIL_USERNAME}}
          password: ${{secrets.MAIL_PASSWORD}}
          subject: Pipeline on '${{ gitea.repository }}' failed
          to: ${{ steps.commiter_info.outputs.MAIL }}
          from: Gitea Actions <gitea_actions@blackforestbytes.de>
          body: "Go to https://gogs.blackforestbytes.com/${{ gitea.repository }}/actions"
6  .idea/golinter.xml  (generated, new file)
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="GoLinterSettings">
    <option name="checkGoLinterExe" value="false" />
  </component>
</project>
6  .idea/sqldialects.xml  (generated, new file)
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="SqlDialectMappings">
    <file url="file://$PROJECT_DIR$/sq/sq_test.go" dialect="SQLite" />
  </component>
</project>
15  Makefile
@@ -1,16 +1,17 @@
+.PHONY: run test version update-mongo
+
 run:
 	echo "This is a library - can't be run" && false
 
 test:
 	# go test ./...
 	which gotestsum || go install gotest.tools/gotestsum@latest
-	gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./test"
+	gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./..."
 
+test-in-docker:
+	tag="goext_temp_test_image:$(shell uuidgen | tr -d '-')"; \
+	docker build --tag $$tag . -f .gitea/workflows/Dockerfile_tests; \
+	docker run --rm $$tag; \
+	docker rmi $$tag
+
 version:
 	_data/version.sh
 
+update-mongo:
+	_data/update-mongo.sh
62  README.md
@@ -10,32 +10,36 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"`
 
 ### Packages:
 
 | Name | Maintainer | Description |
-|--------------|------------|---------------------------------------------------------------------------------------------------------------|
+|-------------|------------|---------------------------------------------------------------------------------------------------------------|
 | langext | Mike | General uttility/helper functions, (everything thats missing from go standard library) |
 | mathext | Mike | Utility/Helper functions for math |
 | cryptext | Mike | Utility/Helper functions for encryption |
 | syncext | Mike | Utility/Helper funtions for multi-threading / mutex / channels |
 | dataext | Mike | Various useful data structures |
 | zipext | Mike | Utility for zip/gzip/tar etc |
-| reflectext | Mike | Utility for golagn reflection |
+| reflectext | Mike | Utility for golang reflection |
+| fsext | Mike | Utility for filesytem access |
 | | | |
 | mongoext | Mike | Utility/Helper functions for mongodb |
 | cursortoken | Mike | MongoDB cursortoken implementation |
+| pagination | Mike | Pagination implementation |
 | | | |
 | totpext | Mike | Implementation of TOTP (2-Factor-Auth) |
 | termext | Mike | Utilities for terminals (mostly color output) |
 | confext | Mike | Parses environment configuration into structs |
 | cmdext | Mike | Runner for external commands/processes |
 | | | |
 | sq | Mike | Utility functions for sql based databases |
 | tst | Mike | Utility functions for unit tests |
 | | | |
 | rfctime | Mike | Classes for time seriallization, with different marshallign method for mongo and json |
 | gojson | Mike | Same interface for marshalling/unmarshalling as go/json, except with proper serialization of null arrays/maps |
 | | | |
 | bfcodegen | Mike | Various codegen tools (run via go generate) |
 | | | |
 | rext | Mike | Regex Wrapper, wraps regexp with a better interface |
 | wmo | Mike | Mongo Wrapper, wraps mongodb with a better interface |
 | | | |
+| scn | Mike | SimpleCloudNotifier |
+| | | |
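The table above catalogs the subpackages that all live under one module path. A minimal sketch of pulling one of them into a program, assuming `langext.Ptr` and `langext.Conditional` are the generic helpers their use elsewhere in this diff suggests:

```go
package main

import (
	"fmt"

	// Module path used throughout this diff; as a private module it may need
	// `export GOPRIVATE="gogs.mikescher.com"` to resolve.
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
	// langext.Ptr and langext.Conditional appear in the generators further down
	// in this diff; shown here purely as a usage sketch.
	enabled := langext.Ptr(true)
	fmt.Println(langext.Conditional(*enabled, "library enabled", "library disabled"))
}
```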
12  TODO.md
@@ -2,12 +2,6 @@
 
 - cronext
 
-- cursortoken
-
-- typed/geenric mongo wrapper
-
-- error package
-
 - rfctime.DateOnly
 - rfctime.HMSTimeOnly
 - rfctime.NanoTimeOnly
@@ -1,80 +0,0 @@
diff --git a/mongo/bson/bsoncodec/struct_codec.go b/mongo/bson/bsoncodec/struct_codec.go
--- a/mongo/bson/bsoncodec/struct_codec.go
+++ b/mongo/bson/bsoncodec/struct_codec.go
@@ -122,6 +122,10 @@ func (sc *StructCodec) EncodeValue(r EncodeContext, vw bsonrw.ValueWriter, val r
 	}
 	var rv reflect.Value
 	for _, desc := range sd.fl {
+		if desc.omitAlways {
+			continue
+		}
+
 		if desc.inline == nil {
 			rv = val.Field(desc.idx)
 		} else {
@@ -400,15 +404,16 @@ type structDescription struct {
 }
 
 type fieldDescription struct {
-	name      string // BSON key name
-	fieldName string // struct field name
-	idx       int
-	omitEmpty bool
-	minSize   bool
-	truncate  bool
-	inline    []int
-	encoder   ValueEncoder
-	decoder   ValueDecoder
+	name       string // BSON key name
+	fieldName  string // struct field name
+	idx        int
+	omitEmpty  bool
+	omitAlways bool
+	minSize    bool
+	truncate   bool
+	inline     []int
+	encoder    ValueEncoder
+	decoder    ValueDecoder
 }
 
 type byIndex []fieldDescription
@@ -491,6 +496,7 @@ func (sc *StructCodec) describeStruct(r *Registry, t reflect.Type) (*structDescr
 	}
 	description.name = stags.Name
 	description.omitEmpty = stags.OmitEmpty
+	description.omitAlways = stags.OmitAlways
 	description.minSize = stags.MinSize
 	description.truncate = stags.Truncate
 
diff --git a/mongo/bson/bsoncodec/struct_tag_parser.go b/mongo/bson/bsoncodec/struct_tag_parser.go
--- a/mongo/bson/bsoncodec/struct_tag_parser.go
+++ b/mongo/bson/bsoncodec/struct_tag_parser.go
@@ -52,12 +52,13 @@ func (stpf StructTagParserFunc) ParseStructTags(sf reflect.StructField) (StructT
 //
 // TODO(skriptble): Add tags for undefined as nil and for null as nil.
 type StructTags struct {
-	Name      string
-	OmitEmpty bool
-	MinSize   bool
-	Truncate  bool
-	Inline    bool
-	Skip      bool
+	Name       string
+	OmitEmpty  bool
+	OmitAlways bool
+	MinSize    bool
+	Truncate   bool
+	Inline     bool
+	Skip       bool
 }
 
 // DefaultStructTagParser is the StructTagParser used by the StructCodec by default.
@@ -108,6 +109,8 @@ func parseTags(key string, tag string) (StructTags, error) {
 	switch str {
 	case "omitempty":
 		st.OmitEmpty = true
+	case "omitalways":
+		st.OmitAlways = true
 	case "minsize":
 		st.MinSize = true
 	case "truncate":
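The removed patch above teaches the vendored bson struct codec a new `omitalways` tag option: the option is parsed into `StructTags.OmitAlways`, copied onto the field description, and `EncodeValue` then skips any field that carries it. A hedged sketch of what such a field would look like on a caller's struct (type and field names are hypothetical):

```go
package example

// Hypothetical model: with the patched codec, a field tagged "omitalways" is
// still parsed and kept on the struct, but EncodeValue skips it entirely, so
// it is never written to MongoDB.
type User struct {
	ID           string `bson:"_id"`
	Username     string `bson:"username"`
	PasswordHash string `bson:"passwordHash,omitalways"`
}
```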
@@ -1,95 +0,0 @@
#!/bin/bash

set -o nounset   # disallow usage of unset vars ( set -u )
set -o errexit   # Exit immediately if a pipeline returns non-zero. ( set -e )
set -o errtrace  # Allow the above trap be inherited by all functions in the script. ( set -E )
set -o pipefail  # Return value of a pipeline is the value of the last (rightmost) command to exit with a non-zero status
IFS=$'\n\t'      # Set $IFS to only newline and tab.


dir="/tmp/mongo_repo_$( uuidgen )"

echo ""
echo "> Clone https://github.dev/mongodb/mongo-go-driver"
echo ""

git clone "https://github.com/mongodb/mongo-go-driver" "$dir"

pushd "$dir"

git fetch --tags

latestTag="$( git describe --tags `git rev-list --tags --max-count=1` )"

git -c "advice.detachedHead=false" checkout $latestTag

latestSHA="$( git rev-parse HEAD )"

popd

existingTag=$( cat mongoPatchVersion.go | grep -oP "(?<=const MongoCloneTag = \")([A-Za-z0-9.]+)(?=\")" )
existingSHA=$( cat mongoPatchVersion.go | grep -oP "(?<=const MongoCloneCommit = \")([A-Za-z0-9.]+)(?=\")" )

echo "===================================="
echo "ID  (online) $latestSHA"
echo "ID  (local)  $existingSHA"
echo "Tag (online) $latestTag"
echo "Tag (local)  $existingTag"
echo "===================================="

if [[ "$latestTag" == "$existingTag" ]]; then
	echo "Nothing to do"
	rm -rf "$dir"
	exit 0
fi

echo ""
echo "> Copy repository"
echo ""

rm -rf mongo
cp -r "$dir" "mongo"
rm -rf "$dir"

echo ""
echo "> Clean repository"
echo ""

rm -rf "mongo/.git"
rm -rf "mongo/.evergreen"
rm -rf "mongo/cmd"
rm -rf "mongo/docs"
rm -rf "mongo/etc"
rm -rf "mongo/examples"
rm -rf "mongo/testdata"
rm -rf "mongo/benchmark"
rm -rf "mongo/vendor"
rm -rf "mongo/internal/test"
rm -rf "mongo/go.mod"
rm -rf "mongo/go.sum"

echo ""
echo "> Update mongoPatchVersion.go"
echo ""

{

	printf "package goext\n"
	printf "\n"
	printf "// %s\n" "$( date +"%Y-%m-%d %H:%M:%S%z" )"
	printf "\n"
	printf "const MongoCloneTag = \"%s\"\n" "$latestTag"
	printf "const MongoCloneCommit = \"%s\"\n" "$latestSHA"

} > mongoPatchVersion.go

echo ""
echo "> Patch mongo"
echo ""

git apply -v _data/mongo.patch

echo ""
echo "Done."
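The removed script clones the newest mongo-go-driver tag, vendors it into `mongo/`, applies `_data/mongo.patch`, and rewrites `mongoPatchVersion.go` via the printf block near the end. Based on that block, the generated file would look roughly like this (tag, commit, and timestamp values are placeholders, not real values):

```go
package goext

// 2024-01-01 00:00:00+0000  (generation timestamp written by the script; placeholder)

// Placeholder values: the script fills in the latest driver tag and its commit SHA.
const MongoCloneTag = "v1.0.0"
const MongoCloneCommit = "0000000000000000000000000000000000000000"
```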
@@ -21,6 +21,11 @@ if [ "$( git rev-parse --abbrev-ref HEAD )" != "master" ]; then
 	exit 1
 fi
 
+echo ""
+echo -n "Insert optional commit message: "
+read commitMessage
+echo ""
+
 git pull --ff
 
 go get -u ./...
@@ -40,6 +45,11 @@ git add --verbose .
 
 msg="v${next_ver}"
 
+if [[ "$commitMessage" != "" ]]; then
+	msg="${msg} ${commitMessage}"
+fi
+
+
 if [ $# -gt 0 ]; then
 	msg="$1"
 fi
|||||||
BIN
bfcodegen/_test_example_1.tgz
Normal file
BIN
bfcodegen/_test_example_1.tgz
Normal file
Binary file not shown.
BIN
bfcodegen/_test_example_2.tgz
Normal file
BIN
bfcodegen/_test_example_2.tgz
Normal file
Binary file not shown.
182  bfcodegen/csid-generate.go  (new file)
@@ -0,0 +1,182 @@
package bfcodegen

import (
	"bytes"
	_ "embed"
	"errors"
	"fmt"
	"go/format"
	"gogs.mikescher.com/BlackForestBytes/goext"
	"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"gogs.mikescher.com/BlackForestBytes/goext/rext"
	"io"
	"os"
	"path"
	"path/filepath"
	"regexp"
	"strings"
	"text/template"
)

type CSIDDef struct {
	File         string
	FileRelative string
	Name         string
	Prefix       string
}

var rexCSIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`))

var rexCSIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@csid:type)\s+\[(?P<prefix>[A-Z0-9]{3})].*$`))

var rexCSIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumCharsetIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))

//go:embed csid-generate.template
var templateCSIDGenerateText string

func GenerateCharsetIDSpecs(sourceDir string, destFile string) error {

	files, err := os.ReadDir(sourceDir)
	if err != nil {
		return err
	}

	oldChecksum := "N/A"
	if _, err := os.Stat(destFile); !os.IsNotExist(err) {
		content, err := os.ReadFile(destFile)
		if err != nil {
			return err
		}
		if m, ok := rexCSIDChecksumConst.MatchFirst(string(content)); ok {
			oldChecksum = m.GroupByName("cs").Value()
		}
	}

	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") })
	langext.SortBy(files, func(v os.DirEntry) string { return v.Name() })

	newChecksumStr := goext.GoextVersion
	for _, f := range files {
		content, err := os.ReadFile(path.Join(sourceDir, f.Name()))
		if err != nil {
			return err
		}
		newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content)
	}

	newChecksum := cryptext.BytesSha256([]byte(newChecksumStr))

	if newChecksum != oldChecksum {
		fmt.Printf("[CSIDGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum)
	} else {
		fmt.Printf("[CSIDGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum)
		return nil
	}

	allIDs := make([]CSIDDef, 0)

	pkgname := ""

	for _, f := range files {
		fmt.Printf("========= %s =========\n\n", f.Name())
		fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name()))
		if err != nil {
			return err
		}

		fmt.Printf("\n")

		allIDs = append(allIDs, fileIDs...)

		if pn != "" {
			pkgname = pn
		}
	}

	if pkgname == "" {
		return errors.New("no package name found in any file")
	}

	fdata, err := format.Source([]byte(fmtCSIDOutput(newChecksum, allIDs, pkgname)))
	if err != nil {
		return err
	}

	err = os.WriteFile(destFile, fdata, 0o755)
	if err != nil {
		return err
	}

	return nil
}

func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) {
	file, err := os.Open(fn)
	if err != nil {
		return nil, "", err
	}

	defer func() { _ = file.Close() }()

	bin, err := io.ReadAll(file)
	if err != nil {
		return nil, "", err
	}

	lines := strings.Split(string(bin), "\n")

	ids := make([]CSIDDef, 0)

	pkgname := ""

	for i, line := range lines {
		if i == 0 && strings.HasPrefix(line, "// Code generated by") {
			break
		}

		if match, ok := rexCSIDPackage.MatchFirst(line); i == 0 && ok {
			pkgname = match.GroupByName("name").Value()
			continue
		}

		if match, ok := rexCSIDDef.MatchFirst(line); ok {

			rfp, err := filepath.Rel(basedir, fn)
			if err != nil {
				return nil, "", err
			}

			def := CSIDDef{
				File:         fn,
				FileRelative: rfp,
				Name:         match.GroupByName("name").Value(),
				Prefix:       match.GroupByName("prefix").Value(),
			}
			fmt.Printf("Found ID definition { '%s' }\n", def.Name)
			ids = append(ids, def)
		}
	}

	return ids, pkgname, nil
}

func fmtCSIDOutput(cs string, ids []CSIDDef, pkgname string) string {
	templ := template.Must(template.New("csid-generate").Parse(templateCSIDGenerateText))

	buffer := bytes.Buffer{}

	err := templ.Execute(&buffer, langext.H{
		"PkgName":      pkgname,
		"Checksum":     cs,
		"GoextVersion": goext.GoextVersion,
		"IDs":          ids,
	})
	if err != nil {
		panic(err)
	}

	return buffer.String()
}
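`GenerateCharsetIDSpecs` scans every non-generated `.go` file in `sourceDir` for markers matching `rexCSIDDef` (a string type annotated with `@csid:type` and a three-character prefix) and, whenever the checksum changes, renders the embedded template into `destFile`. A minimal sketch of an input file, with hypothetical type names and go:generate wiring:

```go
package models

// Hypothetical go:generate wiring; any small main() that calls
// bfcodegen.GenerateCharsetIDSpecs(".", "csid_gen.go") would work here.
//go:generate go run ./cmd/csidgen

// Every string type marked with "@csid:type [XXX]" is picked up by rexCSIDDef;
// the three-character block becomes the ID prefix in the generated code.
type UserID string // @csid:type [USR]

type SessionID string // @csid:type [SES]
```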
190  bfcodegen/csid-generate.template  (new file)
@@ -0,0 +1,190 @@
// Code generated by csid-generate.go DO NOT EDIT.

package {{.PkgName}}

import "crypto/rand"
import "crypto/sha256"
import "fmt"
import "github.com/go-playground/validator/v10"
import "github.com/rs/zerolog/log"
import "gogs.mikescher.com/BlackForestBytes/goext/exerr"
import "gogs.mikescher.com/BlackForestBytes/goext/langext"
import "gogs.mikescher.com/BlackForestBytes/goext/rext"
import "math/big"
import "reflect"
import "regexp"
import "strings"

const ChecksumCharsetIDGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}}

const idlen = 24

const checklen = 1

const idCharset = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
const idCharsetLen = len(idCharset)

var charSetReverseMap = generateCharsetMap()

const ({{range .IDs}}
	prefix{{.Name}} = "{{.Prefix}}" {{end}}
)

var ({{range .IDs}}
	regex{{.Name}} = generateRegex(prefix{{.Name}}) {{end}}
)

func generateRegex(prefix string) rext.Regex {
	return rext.W(regexp.MustCompile(fmt.Sprintf("^%s[%s]{%d}[%s]{%d}$", prefix, idCharset, idlen-len(prefix)-checklen, idCharset, checklen)))
}

func generateCharsetMap() []int {
	result := make([]int, 128)
	for i := 0; i < len(result); i++ {
		result[i] = -1
	}
	for idx, chr := range idCharset {
		result[int(chr)] = idx
	}
	return result
}

func generateID(prefix string) string {
	k := ""
	csMax := big.NewInt(int64(idCharsetLen))
	checksum := 0
	for i := 0; i < idlen-len(prefix)-checklen; i++ {
		v, err := rand.Int(rand.Reader, csMax)
		if err != nil {
			panic(err)
		}
		v64 := v.Int64()
		k += string(idCharset[v64])
		checksum = (checksum + int(v64)) % (idCharsetLen)
	}
	checkstr := string(idCharset[checksum%idCharsetLen])
	return prefix + k + checkstr
}

func generateIDFromSeed(prefix string, seed string) string {
	h := sha256.New()

	iddata := ""
	for len(iddata) < idlen-len(prefix)-checklen {
		h.Write([]byte(seed))
		bs := h.Sum(nil)
		iddata += langext.NewAnyBaseConverter(idCharset).Encode(bs)
	}

	checksum := 0
	for i := 0; i < idlen-len(prefix)-checklen; i++ {
		ichr := int(iddata[i])
		checksum = (checksum + charSetReverseMap[ichr]) % (idCharsetLen)
	}

	checkstr := string(idCharset[checksum%idCharsetLen])

	return prefix + iddata[:(idlen-len(prefix)-checklen)] + checkstr
}

func validateID(prefix string, value string) error {
	if len(value) != idlen {
		return exerr.New(exerr.TypeInvalidCSID, "id has the wrong length").Str("value", value).Build()
	}

	if !strings.HasPrefix(value, prefix) {
		return exerr.New(exerr.TypeInvalidCSID, "id is missing the correct prefix").Str("value", value).Str("prefix", prefix).Build()
	}

	checksum := 0
	for i := len(prefix); i < len(value)-checklen; i++ {
		ichr := int(value[i])
		if ichr < 0 || ichr >= len(charSetReverseMap) || charSetReverseMap[ichr] == -1 {
			return exerr.New(exerr.TypeInvalidCSID, "id contains invalid characters").Str("value", value).Build()
		}
		checksum = (checksum + charSetReverseMap[ichr]) % (idCharsetLen)
	}

	checkstr := string(idCharset[checksum%idCharsetLen])

	if !strings.HasSuffix(value, checkstr) {
		return exerr.New(exerr.TypeInvalidCSID, "id checkstring is invalid").Str("value", value).Str("checkstr", checkstr).Build()
	}

	return nil
}

func getRawData(prefix string, value string) string {
	if len(value) != idlen {
		return ""
	}
	return value[len(prefix) : idlen-checklen]
}

func getCheckString(prefix string, value string) string {
	if len(value) != idlen {
		return ""
	}
	return value[idlen-checklen:]
}

func ValidateEntityID(vfl validator.FieldLevel) bool {
	if !vfl.Field().CanInterface() {
		log.Error().Msgf("Failed to validate EntityID (cannot interface ?!?)")
		return false
	}

	ifvalue := vfl.Field().Interface()

	if value1, ok := ifvalue.(EntityID); ok {

		if vfl.Field().Type().Kind() == reflect.Pointer && langext.IsNil(value1) {
			return true
		}

		if err := value1.Valid(); err != nil {
			log.Debug().Msgf("Failed to validate EntityID '%s' (%s)", value1.String(), err.Error())
			return false
		} else {
			return true
		}

	} else {
		log.Error().Msgf("Failed to validate EntityID (wrong type: %T)", ifvalue)
		return false
	}
}

{{range .IDs}}

// ================================ {{.Name}} ({{.FileRelative}}) ================================

func New{{.Name}}() {{.Name}} {
	return {{.Name}}(generateID(prefix{{.Name}}))
}

func (id {{.Name}}) Valid() error {
	return validateID(prefix{{.Name}}, string(id))
}

func (i {{.Name}}) String() string {
	return string(i)
}

func (i {{.Name}}) Prefix() string {
	return prefix{{.Name}}
}

func (id {{.Name}}) Raw() string {
	return getRawData(prefix{{.Name}}, string(id))
}

func (id {{.Name}}) CheckString() string {
	return getCheckString(prefix{{.Name}}, string(id))
}

func (id {{.Name}}) Regex() rext.Regex {
	return regex{{.Name}}
}

{{end}}
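For each marked type the template emits a constructor plus validation and accessor methods (`New<T>`, `Valid`, `Prefix`, `Raw`, `CheckString`, `Regex`). A usage sketch against the hypothetical `UserID` from the previous example, assuming the generated `csid_gen.go` lives in the same package:

```go
package models

import "testing"

// Sketch only: these identifiers exist after running the generator above.
func TestUserIDSketch(t *testing.T) {
	id := NewUserID() // 24 characters: "USR" prefix + random body + 1 checksum character
	if err := id.Valid(); err != nil {
		t.Fatal(err)
	}
	if id.Prefix() != "USR" || len(id.Raw()) != 20 || len(id.CheckString()) != 1 {
		t.Fatalf("unexpected id shape: %s", id.String())
	}
}
```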
52  bfcodegen/csid-generate_test.go  (new file)
@@ -0,0 +1,52 @@
package bfcodegen

import (
	_ "embed"
	"fmt"
	"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"gogs.mikescher.com/BlackForestBytes/goext/tst"
	"os"
	"path/filepath"
	"testing"
	"time"
)

//go:embed _test_example_1.tgz
var CSIDExampleModels1 []byte

func TestGenerateCSIDSpecs(t *testing.T) {

	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz")

	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())

	err := os.WriteFile(tmpFile, CSIDExampleModels1, 0o777)
	tst.AssertNoErr(t, err)

	t.Cleanup(func() { _ = os.Remove(tmpFile) })

	err = os.Mkdir(tmpDir, 0o777)
	tst.AssertNoErr(t, err)

	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) })

	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
	tst.AssertNoErr(t, err)

	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go")
	tst.AssertNoErr(t, err)

	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go")
	tst.AssertNoErr(t, err)

	fmt.Println()
	fmt.Println()
	fmt.Println()
	fmt.Println("=====================================================================================================")
	fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/csid_gen.go"))(t)))
	fmt.Println("=====================================================================================================")
	fmt.Println()
	fmt.Println()
	fmt.Println()
}
@@ -1,10 +1,13 @@
|
|||||||
package bfcodegen
|
package bfcodegen
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
|
_ "embed"
|
||||||
|
"encoding/json"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"go/format"
|
||||||
"gogs.mikescher.com/BlackForestBytes/goext"
|
"gogs.mikescher.com/BlackForestBytes/goext"
|
||||||
"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
|
|
||||||
"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
|
"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
|
||||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||||
"gogs.mikescher.com/BlackForestBytes/goext/rext"
|
"gogs.mikescher.com/BlackForestBytes/goext/rext"
|
||||||
@@ -12,15 +15,18 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"path"
|
"path"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
"reflect"
|
||||||
"regexp"
|
"regexp"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"text/template"
|
||||||
)
|
)
|
||||||
|
|
||||||
type EnumDefVal struct {
|
type EnumDefVal struct {
|
||||||
VarName string
|
VarName string
|
||||||
Value string
|
Value string
|
||||||
Description *string
|
Description *string
|
||||||
|
Data *map[string]any
|
||||||
|
RawComment *string
|
||||||
}
|
}
|
||||||
|
|
||||||
type EnumDef struct {
|
type EnumDef struct {
|
||||||
@@ -31,41 +37,64 @@ type EnumDef struct {
|
|||||||
Values []EnumDefVal
|
Values []EnumDefVal
|
||||||
}
|
}
|
||||||
|
|
||||||
var rexPackage = rext.W(regexp.MustCompile("^package\\s+(?P<name>[A-Za-z0-9_]+)\\s*$"))
|
var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`))
|
||||||
|
|
||||||
var rexEnumDef = rext.W(regexp.MustCompile("^\\s*type\\s+(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*//\\s*(@enum:type).*$"))
|
var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`))
|
||||||
|
|
||||||
var rexValueDef = rext.W(regexp.MustCompile("^\\s*(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*=\\s*(?P<value>(\"[A-Za-z0-9_:]+\"|[0-9]+))\\s*(//(?P<descr>.*))?.*$"))
|
var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-.]+"|[0-9]+))\s*(//(?P<comm>.*))?.*$`))
|
||||||
|
|
||||||
var rexChecksumConst = rext.W(regexp.MustCompile("const ChecksumGenerator = \"(?P<cs>[A-Za-z0-9_]*)\""))
|
var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))
|
||||||
|
|
||||||
|
//go:embed enum-generate.template
|
||||||
|
var templateEnumGenerateText string
|
||||||
|
|
||||||
func GenerateEnumSpecs(sourceDir string, destFile string) error {
|
func GenerateEnumSpecs(sourceDir string, destFile string) error {
|
||||||
|
|
||||||
files, err := os.ReadDir(sourceDir)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
oldChecksum := "N/A"
|
oldChecksum := "N/A"
|
||||||
if _, err := os.Stat(destFile); !os.IsNotExist(err) {
|
if _, err := os.Stat(destFile); !os.IsNotExist(err) {
|
||||||
content, err := os.ReadFile(destFile)
|
content, err := os.ReadFile(destFile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if m, ok := rexChecksumConst.MatchFirst(string(content)); ok {
|
if m, ok := rexEnumChecksumConst.MatchFirst(string(content)); ok {
|
||||||
oldChecksum = m.GroupByName("cs").Value()
|
oldChecksum = m.GroupByName("cs").Value()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
gocode, _, changed, err := _generateEnumSpecs(sourceDir, destFile, oldChecksum, true)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if !changed {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
err = os.WriteFile(destFile, []byte(gocode), 0o755)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, gofmt bool) (string, string, bool, error) {
|
||||||
|
|
||||||
|
files, err := os.ReadDir(sourceDir)
|
||||||
|
if err != nil {
|
||||||
|
return "", "", false, err
|
||||||
|
}
|
||||||
|
|
||||||
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
|
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
|
||||||
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
|
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
|
||||||
|
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") })
|
||||||
langext.SortBy(files, func(v os.DirEntry) string { return v.Name() })
|
langext.SortBy(files, func(v os.DirEntry) string { return v.Name() })
|
||||||
|
|
||||||
newChecksumStr := goext.GoextVersion
|
newChecksumStr := goext.GoextVersion
|
||||||
for _, f := range files {
|
for _, f := range files {
|
||||||
content, err := os.ReadFile(path.Join(sourceDir, f.Name()))
|
content, err := os.ReadFile(path.Join(sourceDir, f.Name()))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return "", "", false, err
|
||||||
}
|
}
|
||||||
newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content)
|
newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content)
|
||||||
}
|
}
|
||||||
@@ -76,7 +105,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
|
|||||||
fmt.Printf("[EnumGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum)
|
fmt.Printf("[EnumGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum)
|
||||||
} else {
|
} else {
|
||||||
fmt.Printf("[EnumGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum)
|
fmt.Printf("[EnumGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum)
|
||||||
return nil
|
return "", oldChecksum, false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
allEnums := make([]EnumDef, 0)
|
allEnums := make([]EnumDef, 0)
|
||||||
@@ -85,9 +114,9 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
|
|||||||
|
|
||||||
for _, f := range files {
|
for _, f := range files {
|
||||||
fmt.Printf("========= %s =========\n\n", f.Name())
|
fmt.Printf("========= %s =========\n\n", f.Name())
|
||||||
fileEnums, pn, err := processFile(sourceDir, path.Join(sourceDir, f.Name()))
|
fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name()))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return "", "", false, err
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Printf("\n")
|
fmt.Printf("\n")
|
||||||
@@ -100,32 +129,24 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if pkgname == "" {
|
if pkgname == "" {
|
||||||
return errors.New("no package name found in any file")
|
return "", "", false, errors.New("no package name found in any file")
|
||||||
}
|
}
|
||||||
|
|
||||||
err = os.WriteFile(destFile, []byte(fmtOutput(newChecksum, allEnums, pkgname)), 0o755)
|
rdata := fmtEnumOutput(newChecksum, allEnums, pkgname)
|
||||||
|
|
||||||
|
if !gofmt {
|
||||||
|
return rdata, newChecksum, true, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
fdata, err := format.Source([]byte(rdata))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return "", "", false, err
|
||||||
}
|
}
|
||||||
|
|
||||||
res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second))
|
return string(fdata), newChecksum, true, nil
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if res.CommandTimedOut {
|
|
||||||
fmt.Println(res.StdCombined)
|
|
||||||
return errors.New("go fmt timed out")
|
|
||||||
}
|
|
||||||
if res.ExitCode != 0 {
|
|
||||||
fmt.Println(res.StdCombined)
|
|
||||||
return errors.New("go fmt did not succeed")
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func processFile(basedir string, fn string) ([]EnumDef, string, error) {
|
func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) {
|
||||||
file, err := os.Open(fn)
|
file, err := os.Open(fn)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, "", err
|
return nil, "", err
|
||||||
@@ -149,7 +170,7 @@ func processFile(basedir string, fn string) ([]EnumDef, string, error) {
|
|||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
if match, ok := rexPackage.MatchFirst(line); i == 0 && ok {
|
if match, ok := rexEnumPackage.MatchFirst(line); i == 0 && ok {
|
||||||
pkgname = match.GroupByName("name").Value()
|
pkgname = match.GroupByName("name").Value()
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
@@ -172,12 +193,36 @@ func processFile(basedir string, fn string) ([]EnumDef, string, error) {
|
|||||||
fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type)
|
fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type)
|
||||||
}
|
}
|
||||||
|
|
||||||
if match, ok := rexValueDef.MatchFirst(line); ok {
|
if match, ok := rexEnumValueDef.MatchFirst(line); ok {
|
||||||
typename := match.GroupByName("type").Value()
|
typename := match.GroupByName("type").Value()
|
||||||
|
|
||||||
|
comment := match.GroupByNameOrEmpty("comm").ValueOrNil()
|
||||||
|
var descr *string = nil
|
||||||
|
var data *map[string]any = nil
|
||||||
|
if comment != nil {
|
||||||
|
comment = langext.Ptr(strings.TrimSpace(*comment))
|
||||||
|
if strings.HasPrefix(*comment, "{") {
|
||||||
|
if v, ok := tryParseDataComment(*comment); ok {
|
||||||
|
data = &v
|
||||||
|
if anyDataDescr, ok := v["description"]; ok {
|
||||||
|
if dataDescr, ok := anyDataDescr.(string); ok {
|
||||||
|
descr = &dataDescr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
descr = comment
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
descr = comment
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
def := EnumDefVal{
|
def := EnumDefVal{
|
||||||
VarName: match.GroupByName("name").Value(),
|
VarName: match.GroupByName("name").Value(),
|
||||||
Value: match.GroupByName("value").Value(),
|
Value: match.GroupByName("value").Value(),
|
||||||
Description: match.GroupByNameOrEmpty("descr").ValueOrNil(),
|
RawComment: comment,
|
||||||
|
Description: descr,
|
||||||
|
Data: data,
|
||||||
}
|
}
|
||||||
|
|
||||||
found := false
|
found := false
|
||||||
@@ -202,163 +247,109 @@ func processFile(basedir string, fn string) ([]EnumDef, string, error) {
|
|||||||
return enums, pkgname, nil
|
return enums, pkgname, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func fmtOutput(cs string, enums []EnumDef, pkgname string) string {
|
func tryParseDataComment(s string) (map[string]any, bool) {
|
||||||
str := "// Code generated by enum-generate.go DO NOT EDIT.\n"
|
|
||||||
str += "\n"
|
|
||||||
str += "package " + pkgname + "\n"
|
|
||||||
str += "\n"
|
|
||||||
|
|
||||||
str += "import \"gogs.mikescher.com/BlackForestBytes/goext/langext\"" + "\n"
|
r := make(map[string]any)
|
||||||
str += "\n"
|
|
||||||
|
|
||||||
str += "const ChecksumGenerator = \"" + cs + "\"" + "\n"
|
|
||||||
str += "\n"
|
|
||||||
|
|
||||||
str += "type Enum interface {" + "\n"
|
|
||||||
str += " Valid() bool" + "\n"
|
|
||||||
str += " ValuesAny() []any" + "\n"
|
|
||||||
str += " ValuesMeta() []EnumMetaValue" + "\n"
|
|
||||||
str += " VarName() string" + "\n"
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
|
|
||||||
str += "type StringEnum interface {" + "\n"
|
|
||||||
str += " Enum" + "\n"
|
|
||||||
str += " String() string" + "\n"
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
|
|
||||||
str += "type DescriptionEnum interface {" + "\n"
|
|
||||||
str += " Enum" + "\n"
|
|
||||||
str += " Description() string" + "\n"
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "\n"
|
|
||||||
|
|
||||||
str += "type EnumMetaValue struct {" + "\n"
|
|
||||||
str += " VarName string `json:\"varName\"`" + "\n"
|
|
||||||
str += " Value any `json:\"value\"`" + "\n"
|
|
||||||
str += " Description *string `json:\"description\"`" + "\n"
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "\n"
|
|
||||||
|
|
||||||
for _, enumdef := range enums {
|
|
||||||
|
|
||||||
hasDescr := langext.ArrAll(enumdef.Values, func(val EnumDefVal) bool { return val.Description != nil })
|
|
||||||
hasStr := enumdef.Type == "string"
|
|
||||||
|
|
||||||
str += "// ================================ " + enumdef.EnumTypeName + " ================================" + "\n"
|
|
||||||
str += "//" + "\n"
|
|
||||||
str += "// File: " + enumdef.FileRelative + "\n"
|
|
||||||
str += "// StringEnum: " + langext.Conditional(hasStr, "true", "false") + "\n"
|
|
||||||
str += "// DescrEnum: " + langext.Conditional(hasDescr, "true", "false") + "\n"
|
|
||||||
str += "//" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
|
|
||||||
str += "var __" + enumdef.EnumTypeName + "Values = []" + enumdef.EnumTypeName + "{" + "\n"
|
|
||||||
for _, v := range enumdef.Values {
|
|
||||||
str += " " + v.VarName + "," + "\n"
|
|
||||||
}
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
|
|
||||||
if hasDescr {
|
|
||||||
str += "var __" + enumdef.EnumTypeName + "Descriptions = map[" + enumdef.EnumTypeName + "]string{" + "\n"
|
|
||||||
for _, v := range enumdef.Values {
|
|
||||||
str += " " + v.VarName + ": \"" + strings.TrimSpace(*v.Description) + "\"," + "\n"
|
|
||||||
}
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
}
|
|
||||||
|
|
||||||
str += "var __" + enumdef.EnumTypeName + "Varnames = map[" + enumdef.EnumTypeName + "]string{" + "\n"
|
|
||||||
for _, v := range enumdef.Values {
|
|
||||||
str += " " + v.VarName + ": \"" + v.VarName + "\"," + "\n"
|
|
||||||
}
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
|
|
||||||
str += "func (e " + enumdef.EnumTypeName + ") Valid() bool {" + "\n"
|
|
||||||
str += " return langext.InArray(e, __" + enumdef.EnumTypeName + "Values)" + "\n"
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
|
|
||||||
str += "func (e " + enumdef.EnumTypeName + ") Values() []" + enumdef.EnumTypeName + " {" + "\n"
|
|
||||||
str += " return __" + enumdef.EnumTypeName + "Values" + "\n"
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
|
|
||||||
str += "func (e " + enumdef.EnumTypeName + ") ValuesAny() []any {" + "\n"
|
|
||||||
str += " return langext.ArrCastToAny(__" + enumdef.EnumTypeName + "Values)" + "\n"
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
|
|
||||||
str += "func (e " + enumdef.EnumTypeName + ") ValuesMeta() []EnumMetaValue {" + "\n"
|
|
||||||
str += " return []EnumMetaValue{" + "\n"
|
|
||||||
for _, v := range enumdef.Values {
|
|
||||||
if hasDescr {
|
|
||||||
str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: langext.Ptr(\"%s\")},", v.VarName, v.VarName, strings.TrimSpace(*v.Description)) + "\n"
|
|
||||||
} else {
|
|
||||||
str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: nil},", v.VarName, v.VarName) + "\n"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
str += " }" + "\n"
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
|
|
||||||
if hasStr {
|
|
||||||
str += "func (e " + enumdef.EnumTypeName + ") String() string {" + "\n"
|
|
||||||
str += " return string(e)" + "\n"
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
}
|
|
||||||
|
|
||||||
if hasDescr {
|
|
||||||
str += "func (e " + enumdef.EnumTypeName + ") Description() string {" + "\n"
|
|
||||||
str += " if d, ok := __" + enumdef.EnumTypeName + "Descriptions[e]; ok {" + "\n"
|
|
||||||
str += " return d" + "\n"
|
|
||||||
str += " }" + "\n"
|
|
||||||
str += " return \"\"" + "\n"
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
}
|
|
||||||
|
|
||||||
str += "func (e " + enumdef.EnumTypeName + ") VarName() string {" + "\n"
|
|
||||||
str += " if d, ok := __" + enumdef.EnumTypeName + "Varnames[e]; ok {" + "\n"
|
|
||||||
str += " return d" + "\n"
|
|
||||||
str += " }" + "\n"
|
|
||||||
str += " return \"\"" + "\n"
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
|
|
||||||
str += "func Parse" + enumdef.EnumTypeName + "(vv string) (" + enumdef.EnumTypeName + ", bool) {" + "\n"
|
|
||||||
str += " for _, ev := range __" + enumdef.EnumTypeName + "Values {" + "\n"
|
|
||||||
str += " if string(ev) == vv {" + "\n"
|
|
||||||
str += " return ev, true" + "\n"
|
|
||||||
str += " }" + "\n"
|
|
||||||
str += " }" + "\n"
|
|
||||||
str += " return \"\", false" + "\n"
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
|
|
||||||
str += "func " + enumdef.EnumTypeName + "Values() []" + enumdef.EnumTypeName + " {" + "\n"
|
|
||||||
str += " return __" + enumdef.EnumTypeName + "Values" + "\n"
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
|
|
||||||
str += "func " + enumdef.EnumTypeName + "ValuesMeta() []EnumMetaValue {" + "\n"
|
|
||||||
str += " return []EnumMetaValue{" + "\n"
|
|
||||||
for _, v := range enumdef.Values {
|
|
||||||
if hasDescr {
|
|
||||||
str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: langext.Ptr(\"%s\")},", v.VarName, v.VarName, strings.TrimSpace(*v.Description)) + "\n"
|
|
||||||
} else {
|
|
||||||
str += " " + fmt.Sprintf("EnumMetaValue{VarName: \"%s\", Value: %s, Description: nil},", v.VarName, v.VarName) + "\n"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
str += " }" + "\n"
|
|
||||||
str += "}" + "\n"
|
|
||||||
str += "" + "\n"
|
|
||||||
|
|
||||||
|
err := json.Unmarshal([]byte(s), &r)
|
||||||
|
if err != nil {
|
||||||
|
return nil, false
|
||||||
}
|
}
|
||||||
|
|
||||||
return str
|
for _, v := range r {
|
||||||
|
|
||||||
|
rv := reflect.ValueOf(v)
|
||||||
|
|
||||||
|
if rv.Kind() == reflect.Ptr && rv.IsNil() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if rv.Kind() == reflect.Bool {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if rv.Kind() == reflect.String {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if rv.Kind() == reflect.Int64 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if rv.Kind() == reflect.Float64 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
|
||||||
|
return r, true
|
||||||
|
}
|
||||||
|
|
||||||
|
func fmtEnumOutput(cs string, enums []EnumDef, pkgname string) string {
|
||||||
|
|
||||||
|
templ := template.New("enum-generate")
|
||||||
|
|
||||||
|
templ = templ.Funcs(template.FuncMap{
|
||||||
|
"boolToStr": func(b bool) string { return langext.Conditional(b, "true", "false") },
|
||||||
|
"deref": func(v *string) string { return *v },
|
||||||
|
"trimSpace": func(str string) string { return strings.TrimSpace(str) },
|
||||||
|
"hasStr": func(v EnumDef) bool { return v.Type == "string" },
|
||||||
|
"hasDescr": func(v EnumDef) bool {
|
||||||
|
return langext.ArrAll(v.Values, func(val EnumDefVal) bool { return val.Description != nil })
|
||||||
|
},
|
||||||
|
"hasData": func(v EnumDef) bool {
|
||||||
|
return len(v.Values) > 0 && langext.ArrAll(v.Values, func(val EnumDefVal) bool { return val.Data != nil })
|
||||||
|
},
|
||||||
|
"gostr": func(v any) string {
|
||||||
|
return fmt.Sprintf("%#+v", v)
|
||||||
|
},
|
||||||
|
"goobj": func(name string, v any) string {
|
||||||
|
return fmt.Sprintf("%#+v", v)
|
||||||
|
},
|
||||||
|
"godatakey": func(v string) string {
|
||||||
|
return strings.ToUpper(v[0:1]) + v[1:]
|
||||||
|
},
|
||||||
|
"godatavalue": func(v any) string {
|
||||||
|
return fmt.Sprintf("%#+v", v)
|
||||||
|
},
|
||||||
|
"godatatype": func(v any) string {
|
||||||
|
return fmt.Sprintf("%T", v)
|
||||||
|
},
|
||||||
|
"mapindex": func(v map[string]any, k string) any {
|
||||||
|
return v[k]
|
||||||
|
},
|
||||||
|
"generalDataKeys": func(v EnumDef) map[string]string {
|
||||||
|
r0 := make(map[string]int)
|
||||||
|
|
||||||
|
for _, eval := range v.Values {
|
||||||
|
for k := range *eval.Data {
|
||||||
|
if ctr, ok := r0[k]; ok {
|
||||||
|
r0[k] = ctr + 1
|
||||||
|
} else {
|
||||||
|
r0[k] = 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
r1 := langext.MapToArr(r0)
|
||||||
|
r2 := langext.ArrFilter(r1, func(p langext.MapEntry[string, int]) bool { return p.Value == len(v.Values) })
|
||||||
|
r3 := langext.ArrMap(r2, func(p langext.MapEntry[string, int]) string { return p.Key })
|
||||||
|
r4 := langext.ArrToKVMap(r3, func(p string) string { return p }, func(p string) string { return fmt.Sprintf("%T", (*v.Values[0].Data)[p]) })
|
||||||
|
|
||||||
|
return r4
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
templ = template.Must(templ.Parse(templateEnumGenerateText))
|
||||||
|
|
||||||
|
buffer := bytes.Buffer{}
|
||||||
|
|
||||||
|
err := templ.Execute(&buffer, langext.H{
|
||||||
|
"PkgName": pkgname,
|
||||||
|
"Checksum": cs,
|
||||||
|
"GoextVersion": goext.GoextVersion,
|
||||||
|
"Enums": enums,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return buffer.String()
|
||||||
}
|
}
|
||||||
|
|||||||
137
bfcodegen/enum-generate.template
Normal file
137
bfcodegen/enum-generate.template
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
// Code generated by enum-generate.go DO NOT EDIT.

package {{.PkgName}}

import "gogs.mikescher.com/BlackForestBytes/goext/langext"
import "gogs.mikescher.com/BlackForestBytes/goext/enums"

const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}}

{{range .Enums}}

{{ $hasStr := ( . | hasStr ) }}
{{ $hasDescr := ( . | hasDescr ) }}
{{ $hasData := ( . | hasData ) }}

// ================================ {{.EnumTypeName}} ================================
//
// File: {{.FileRelative}}
// StringEnum: {{$hasStr | boolToStr}}
// DescrEnum: {{$hasDescr | boolToStr}}
// DataEnum: {{$hasData | boolToStr}}
//

{{ $typename := .EnumTypeName }}
{{ $enumdef := . }}

var __{{.EnumTypeName}}Values = []{{.EnumTypeName}}{ {{range .Values}}
	{{.VarName}}, {{end}}
}

{{if $hasDescr}}
var __{{.EnumTypeName}}Descriptions = map[{{.EnumTypeName}}]string{ {{range .Values}}
	{{.VarName}}: {{.Description | deref | trimSpace | gostr}}, {{end}}
}
{{end}}

{{if $hasData}}
type {{ .EnumTypeName }}Data struct { {{ range $datakey, $datatype := ($enumdef | generalDataKeys) }}
	{{ $datakey | godatakey }} {{ $datatype }} `json:"{{ $datakey }}"` {{ end }}
}

var __{{.EnumTypeName}}Data = map[{{.EnumTypeName}}]{{.EnumTypeName}}Data{ {{range .Values}} {{ $enumvalue := . }}
	{{.VarName}}: {{ $typename }}Data{ {{ range $datakey, $datatype := $enumdef | generalDataKeys }}
		{{ $datakey | godatakey }}: {{ (mapindex $enumvalue.Data $datakey) | godatavalue }}, {{ end }}
	}, {{end}}
}
{{end}}

var __{{.EnumTypeName}}Varnames = map[{{.EnumTypeName}}]string{ {{range .Values}}
	{{.VarName}}: "{{.VarName}}", {{end}}
}

func (e {{.EnumTypeName}}) Valid() bool {
	return langext.InArray(e, __{{.EnumTypeName}}Values)
}

func (e {{.EnumTypeName}}) Values() []{{.EnumTypeName}} {
	return __{{.EnumTypeName}}Values
}

func (e {{.EnumTypeName}}) ValuesAny() []any {
	return langext.ArrCastToAny(__{{.EnumTypeName}}Values)
}

func (e {{.EnumTypeName}}) ValuesMeta() []enums.EnumMetaValue {
	return {{.EnumTypeName}}ValuesMeta()
}

{{if $hasStr}}
func (e {{.EnumTypeName}}) String() string {
	return string(e)
}
{{end}}

{{if $hasDescr}}
func (e {{.EnumTypeName}}) Description() string {
	if d, ok := __{{.EnumTypeName}}Descriptions[e]; ok {
		return d
	}
	return ""
}
{{end}}

{{if $hasData}}
func (e {{.EnumTypeName}}) Data() {{.EnumTypeName}}Data {
	if d, ok := __{{.EnumTypeName}}Data[e]; ok {
		return d
	}
	return {{.EnumTypeName}}Data{}
}
{{end}}

func (e {{.EnumTypeName}}) VarName() string {
	if d, ok := __{{.EnumTypeName}}Varnames[e]; ok {
		return d
	}
	return ""
}

func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue {
	{{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}}
}

{{if $hasDescr}}
func (e {{.EnumTypeName}}) DescriptionMeta() enums.EnumDescriptionMetaValue {
	return enums.EnumDescriptionMetaValue{VarName: e.VarName(), Value: e, Description: e.Description()}
}
{{end}}

func Parse{{.EnumTypeName}}(vv string) ({{.EnumTypeName}}, bool) {
	for _, ev := range __{{.EnumTypeName}}Values {
		if string(ev) == vv {
			return ev, true
		}
	}
	return "", false
}

func {{.EnumTypeName}}Values() []{{.EnumTypeName}} {
	return __{{.EnumTypeName}}Values
}

func {{.EnumTypeName}}ValuesMeta() []enums.EnumMetaValue {
	return []enums.EnumMetaValue{ {{range .Values}}
		{{.VarName}}.Meta(), {{end}}
	}
}

{{if $hasDescr}}
func {{.EnumTypeName}}ValuesDescriptionMeta() []enums.EnumDescriptionMetaValue {
	return []enums.EnumDescriptionMetaValue{ {{range .Values}}
		{{.VarName}}.DescriptionMeta(), {{end}}
	}
}
{{end}}

{{end}}
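Note: a minimal sketch of how code generated from this template is meant to be consumed, assuming a hypothetical string enum `JobState` in the target package (the type name is illustrative, not part of this change); all called helpers (`ParseX`, `Valid`, `VarName`, `Description`) come from the template above.

// Hypothetical caller of the generated enum API; "JobState" is illustrative.
func describeJobState(raw string) string {
	v, ok := ParseJobState(raw) // generated Parse<EnumTypeName> helper
	if !ok || !v.Valid() {
		return "unknown"
	}
	// Description() is only generated when every enum value carries a description comment.
	return v.VarName() + ": " + v.Description()
}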
@@ -1,15 +1,91 @@
 package bfcodegen
 
 import (
+	_ "embed"
+	"fmt"
+	"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
+	"gogs.mikescher.com/BlackForestBytes/goext/langext"
+	"gogs.mikescher.com/BlackForestBytes/goext/tst"
+	"os"
+	"path/filepath"
 	"testing"
+	"time"
 )
 
-func TestApplyEnvOverridesSimple(t *testing.T) {
-	err := GenerateEnumSpecs("/home/mike/Code/reiff/badennet/bnet-backend/models", "/home/mike/Code/reiff/badennet/bnet-backend/models/enums_gen.go")
-	if err != nil {
-		t.Error(err)
-		t.Fail()
-	}
+//go:embed _test_example_1.tgz
+var EnumExampleModels1 []byte
+
+//go:embed _test_example_2.tgz
+var EnumExampleModels2 []byte
+
+func TestGenerateEnumSpecs(t *testing.T) {
+
+	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz")
+
+	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())
+
+	err := os.WriteFile(tmpFile, EnumExampleModels1, 0o777)
+	tst.AssertNoErr(t, err)
+
+	t.Cleanup(func() { _ = os.Remove(tmpFile) })
+
+	err = os.Mkdir(tmpDir, 0o777)
+	tst.AssertNoErr(t, err)
+
+	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) })
+
+	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
+	tst.AssertNoErr(t, err)
+
+	s1, cs1, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true)
+	tst.AssertNoErr(t, err)
+
+	s2, cs2, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true)
+	tst.AssertNoErr(t, err)
+
+	tst.AssertEqual(t, cs1, cs2)
+	tst.AssertEqual(t, s1, s2)
+
+	fmt.Println()
+	fmt.Println()
+	fmt.Println()
+	fmt.Println("=====================================================================================================")
+	fmt.Println(s1)
+	fmt.Println("=====================================================================================================")
+	fmt.Println()
+	fmt.Println()
+	fmt.Println()
+}
+
+func TestGenerateEnumSpecsData(t *testing.T) {
+
+	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz")
+
+	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())
+
+	err := os.WriteFile(tmpFile, EnumExampleModels2, 0o777)
+	tst.AssertNoErr(t, err)
+
+	t.Cleanup(func() { _ = os.Remove(tmpFile) })
+
+	err = os.Mkdir(tmpDir, 0o777)
+	tst.AssertNoErr(t, err)
+
+	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) })
+
+	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
+	tst.AssertNoErr(t, err)
+
+	s1, _, _, err := _generateEnumSpecs(tmpDir, "", "", true)
+	tst.AssertNoErr(t, err)
+
+	fmt.Println()
+	fmt.Println()
+	fmt.Println()
+	fmt.Println("=====================================================================================================")
+	fmt.Println(s1)
+	fmt.Println("=====================================================================================================")
+	fmt.Println()
+	fmt.Println()
+	fmt.Println()
 }
183  bfcodegen/id-generate.go  Normal file
@@ -0,0 +1,183 @@
package bfcodegen
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
_ "embed"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"go/format"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/rext"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"path/filepath"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
"text/template"
|
||||||
|
)
|
||||||
|
|
||||||
|
type IDDef struct {
|
||||||
|
File string
|
||||||
|
FileRelative string
|
||||||
|
Name string
|
||||||
|
}
|
||||||
|
|
||||||
|
var rexIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`))
|
||||||
|
|
||||||
|
var rexIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@id:type).*$`))
|
||||||
|
|
||||||
|
var rexIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))
|
||||||
|
|
||||||
|
//go:embed id-generate.template
|
||||||
|
var templateIDGenerateText string
|
||||||
|
|
||||||
|
func GenerateIDSpecs(sourceDir string, destFile string) error {
|
||||||
|
|
||||||
|
files, err := os.ReadDir(sourceDir)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
oldChecksum := "N/A"
|
||||||
|
if _, err := os.Stat(destFile); !os.IsNotExist(err) {
|
||||||
|
content, err := os.ReadFile(destFile)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if m, ok := rexIDChecksumConst.MatchFirst(string(content)); ok {
|
||||||
|
oldChecksum = m.GroupByName("cs").Value()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
|
||||||
|
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
|
||||||
|
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") })
|
||||||
|
langext.SortBy(files, func(v os.DirEntry) string { return v.Name() })
|
||||||
|
|
||||||
|
newChecksumStr := goext.GoextVersion
|
||||||
|
for _, f := range files {
|
||||||
|
content, err := os.ReadFile(path.Join(sourceDir, f.Name()))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content)
|
||||||
|
}
|
||||||
|
|
||||||
|
newChecksum := cryptext.BytesSha256([]byte(newChecksumStr))
|
||||||
|
|
||||||
|
if newChecksum != oldChecksum {
|
||||||
|
fmt.Printf("[IDGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum)
|
||||||
|
} else {
|
||||||
|
fmt.Printf("[IDGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
allIDs := make([]IDDef, 0)
|
||||||
|
|
||||||
|
pkgname := ""
|
||||||
|
|
||||||
|
for _, f := range files {
|
||||||
|
fmt.Printf("========= %s =========\n\n", f.Name())
|
||||||
|
fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name()))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("\n")
|
||||||
|
|
||||||
|
allIDs = append(allIDs, fileIDs...)
|
||||||
|
|
||||||
|
if pn != "" {
|
||||||
|
pkgname = pn
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if pkgname == "" {
|
||||||
|
return errors.New("no package name found in any file")
|
||||||
|
}
|
||||||
|
|
||||||
|
fdata, err := format.Source([]byte(fmtIDOutput(newChecksum, allIDs, pkgname)))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = os.WriteFile(destFile, fdata, 0o755)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func processIDFile(basedir string, fn string) ([]IDDef, string, error) {
|
||||||
|
file, err := os.Open(fn)
|
||||||
|
if err != nil {
|
||||||
|
return nil, "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
defer func() { _ = file.Close() }()
|
||||||
|
|
||||||
|
bin, err := io.ReadAll(file)
|
||||||
|
if err != nil {
|
||||||
|
return nil, "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
lines := strings.Split(string(bin), "\n")
|
||||||
|
|
||||||
|
ids := make([]IDDef, 0)
|
||||||
|
|
||||||
|
pkgname := ""
|
||||||
|
|
||||||
|
for i, line := range lines {
|
||||||
|
if i == 0 && strings.HasPrefix(line, "// Code generated by") {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
if match, ok := rexIDPackage.MatchFirst(line); i == 0 && ok {
|
||||||
|
pkgname = match.GroupByName("name").Value()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if match, ok := rexIDDef.MatchFirst(line); ok {
|
||||||
|
|
||||||
|
rfp, err := filepath.Rel(basedir, fn)
|
||||||
|
if err != nil {
|
||||||
|
return nil, "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
def := IDDef{
|
||||||
|
File: fn,
|
||||||
|
FileRelative: rfp,
|
||||||
|
Name: match.GroupByName("name").Value(),
|
||||||
|
}
|
||||||
|
fmt.Printf("Found ID definition { '%s' }\n", def.Name)
|
||||||
|
ids = append(ids, def)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ids, pkgname, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func fmtIDOutput(cs string, ids []IDDef, pkgname string) string {
|
||||||
|
templ := template.Must(template.New("id-generate").Parse(templateIDGenerateText))
|
||||||
|
|
||||||
|
buffer := bytes.Buffer{}
|
||||||
|
|
||||||
|
anyDef := langext.ArrFirstOrNil(ids, func(def IDDef) bool { return def.Name == "AnyID" || def.Name == "AnyId" })
|
||||||
|
|
||||||
|
err := templ.Execute(&buffer, langext.H{
|
||||||
|
"PkgName": pkgname,
|
||||||
|
"Checksum": cs,
|
||||||
|
"GoextVersion": goext.GoextVersion,
|
||||||
|
"IDs": ids,
|
||||||
|
"AnyDef": anyDef,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return buffer.String()
|
||||||
|
}
|
||||||
47  bfcodegen/id-generate.template  Normal file
@@ -0,0 +1,47 @@
// Code generated by id-generate.go DO NOT EDIT.

package {{.PkgName}}

import "go.mongodb.org/mongo-driver/bson"
import "go.mongodb.org/mongo-driver/bson/bsontype"
import "go.mongodb.org/mongo-driver/bson/primitive"
import "gogs.mikescher.com/BlackForestBytes/goext/exerr"

const ChecksumIDGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}}

{{range .IDs}}

// ================================ {{.Name}} ({{.FileRelative}}) ================================

func (i {{.Name}}) MarshalBSONValue() (bsontype.Type, []byte, error) {
	if objId, err := primitive.ObjectIDFromHex(string(i)); err == nil {
		return bson.MarshalValue(objId)
	} else {
		return 0, nil, exerr.New(exerr.TypeMarshalEntityID, "Failed to marshal {{.Name}}("+i.String()+") to ObjectId").Str("value", string(i)).Type("type", i).Build()
	}
}

func (i {{.Name}}) String() string {
	return string(i)
}

func (i {{.Name}}) ObjID() (primitive.ObjectID, error) {
	return primitive.ObjectIDFromHex(string(i))
}

func (i {{.Name}}) Valid() bool {
	_, err := primitive.ObjectIDFromHex(string(i))
	return err == nil
}

{{if ne $.AnyDef nil}}
func (i {{.Name}}) AsAny() {{$.AnyDef.Name}} {
	return {{$.AnyDef.Name}}(i)
}
{{end}}

func New{{.Name}}() {{.Name}} {
	return {{.Name}}(primitive.NewObjectID().Hex())
}

{{end}}
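Note: a minimal usage sketch for code generated from this template, assuming a hypothetical `type UserID string // @id:type` declaration in the scanned package (the type name and the helper below are illustrative); `NewX`, `Valid` and `ObjID` are the generated methods above.

// Hypothetical caller of the generated ID helpers; "UserID" is illustrative.
func newUserReference() (UserID, error) {
	id := NewUserID() // generated constructor, backed by primitive.NewObjectID()
	if !id.Valid() {
		return "", errors.New("not a valid ObjectId hex string")
	}
	_, err := id.ObjID() // convert back to a primitive.ObjectID when needed
	return id, err
}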
52  bfcodegen/id-generate_test.go  Normal file
@@ -0,0 +1,52 @@
package bfcodegen

import (
	_ "embed"
	"fmt"
	"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"gogs.mikescher.com/BlackForestBytes/goext/tst"
	"os"
	"path/filepath"
	"testing"
	"time"
)

//go:embed _test_example_1.tgz
var IDExampleModels1 []byte

func TestGenerateIDSpecs(t *testing.T) {

	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz")

	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())

	err := os.WriteFile(tmpFile, IDExampleModels1, 0o777)
	tst.AssertNoErr(t, err)

	t.Cleanup(func() { _ = os.Remove(tmpFile) })

	err = os.Mkdir(tmpDir, 0o777)
	tst.AssertNoErr(t, err)

	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) })

	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
	tst.AssertNoErr(t, err)

	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go")
	tst.AssertNoErr(t, err)

	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go")
	tst.AssertNoErr(t, err)

	fmt.Println()
	fmt.Println()
	fmt.Println()
	fmt.Println("=====================================================================================================")
	fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/id_gen.go"))(t)))
	fmt.Println("=====================================================================================================")
	fmt.Println()
	fmt.Println()
	fmt.Println()
}
@@ -14,6 +14,7 @@ type CommandRunner struct {
 	listener         []CommandListener
 	enforceExitCodes *[]int
 	enforceNoTimeout bool
+	enforceNoStderr  bool
 }
 
 func Runner(program string) *CommandRunner {
@@ -25,6 +26,7 @@ func Runner(program string) *CommandRunner {
 		listener:         make([]CommandListener, 0),
 		enforceExitCodes: nil,
 		enforceNoTimeout: false,
+		enforceNoStderr:  false,
 	}
 }
 
@@ -73,6 +75,11 @@ func (r *CommandRunner) FailOnTimeout() *CommandRunner {
 	return r
 }
 
+func (r *CommandRunner) FailOnStderr() *CommandRunner {
+	r.enforceNoStderr = true
+	return r
+}
+
 func (r *CommandRunner) Listen(lstr CommandListener) *CommandRunner {
 	r.listener = append(r.listener, lstr)
 	return r
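Note: together with the `run()` changes below, the new option is used like the existing FailOnExitCode/FailOnTimeout toggles. A small sketch (the `python3` invocation mirrors the tests in this changeset; assumes the usual `fmt`, `time` and goext/cmdext imports):

func exampleFailOnStderr() {
	// Fails with ErrStderrPrint as soon as the child process writes anything to stderr.
	res, err := cmdext.Runner("python3").
		Arg("-c").
		Arg("import sys; print(\"boom\", file=sys.stderr)").
		Timeout(5 * time.Second).
		FailOnStderr().
		Run()
	if err != nil {
		// res still carries whatever stdout/stderr was collected before the process was killed.
		fmt.Println(res.StdCombined)
	}
}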
@@ -11,6 +11,7 @@ import (
 
 var ErrExitCode = errors.New("process exited with an unexpected exitcode")
 var ErrTimeout = errors.New("process did not exit after the specified timeout")
+var ErrStderrPrint = errors.New("process did print to stderr stream")
 
 type CommandResult struct {
 	StdOut string
@@ -53,12 +54,27 @@ func run(opt CommandRunner) (CommandResult, error) {
 		err error
 	}
 
+	stderrFailChan := make(chan bool)
+
 	outputChan := make(chan resultObj)
 	go func() {
 		// we need to first fully read the pipes and then call Wait
 		// see https://pkg.go.dev/os/exec#Cmd.StdoutPipe
 
-		stdout, stderr, stdcombined, err := preader.Read(opt.listener)
+		listener := make([]CommandListener, 0)
+		listener = append(listener, opt.listener...)
+
+		if opt.enforceNoStderr {
+			listener = append(listener, genericCommandListener{
+				_readRawStderr: langext.Ptr(func(v []byte) {
+					if len(v) > 0 {
+						stderrFailChan <- true
+					}
+				}),
+			})
+		}
+
+		stdout, stderr, stdcombined, err := preader.Read(listener)
 		if err != nil {
 			outputChan <- resultObj{stdout, stderr, stdcombined, err}
 			_ = cmd.Process.Kill()
@@ -115,8 +131,34 @@ func run(opt CommandRunner) (CommandResult, error) {
 			return res, nil
 		}
 
+	case <-stderrFailChan:
+		_ = cmd.Process.Kill()
+
+		if fallback, ok := syncext.ReadChannelWithTimeout(outputChan, 32*time.Millisecond); ok {
+			// most of the time the cmd.Process.Kill() should also have finished the pipereader
+			// and we can at least return the already collected stdout, stderr, etc
+			res := CommandResult{
+				StdOut:          fallback.stdout,
+				StdErr:          fallback.stderr,
+				StdCombined:     fallback.stdcombined,
+				ExitCode:        -1,
+				CommandTimedOut: false,
+			}
+			return res, ErrStderrPrint
+		} else {
+			res := CommandResult{
+				StdOut:          "",
+				StdErr:          "",
+				StdCombined:     "",
+				ExitCode:        -1,
+				CommandTimedOut: false,
+			}
+			return res, ErrStderrPrint
+		}
+
 	case outobj := <-outputChan:
-		if exiterr, ok := outobj.err.(*exec.ExitError); ok {
+		var exiterr *exec.ExitError
+		if errors.As(outobj.err, &exiterr) {
 			excode := exiterr.ExitCode()
 			for _, lstr := range opt.listener {
 				lstr.Finished(excode)
|||||||
@@ -1,6 +1,7 @@
|
|||||||
package cmdext
|
package cmdext
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
@@ -32,7 +33,7 @@ func TestStdout(t *testing.T) {
|
|||||||
|
|
||||||
func TestStderr(t *testing.T) {
|
func TestStderr(t *testing.T) {
|
||||||
|
|
||||||
res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").Run()
|
res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").Run()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Errorf("%v", err)
|
t.Errorf("%v", err)
|
||||||
}
|
}
|
||||||
@@ -55,7 +56,7 @@ func TestStderr(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestStdcombined(t *testing.T) {
|
func TestStdcombined(t *testing.T) {
|
||||||
res1, err := Runner("python").
|
res1, err := Runner("python3").
|
||||||
Arg("-c").
|
Arg("-c").
|
||||||
Arg("import sys; import time; print(\"1\", file=sys.stderr, flush=True); time.sleep(0.1); print(\"2\", file=sys.stdout, flush=True); time.sleep(0.1); print(\"3\", file=sys.stderr, flush=True)").
|
Arg("import sys; import time; print(\"1\", file=sys.stderr, flush=True); time.sleep(0.1); print(\"2\", file=sys.stdout, flush=True); time.sleep(0.1); print(\"3\", file=sys.stderr, flush=True)").
|
||||||
Run()
|
Run()
|
||||||
@@ -81,7 +82,7 @@ func TestStdcombined(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestPartialRead(t *testing.T) {
|
func TestPartialRead(t *testing.T) {
|
||||||
res1, err := Runner("python").
|
res1, err := Runner("python3").
|
||||||
Arg("-c").
|
Arg("-c").
|
||||||
Arg("import sys; import time; print(\"first message\", flush=True); time.sleep(5); print(\"cant see me\", flush=True);").
|
Arg("import sys; import time; print(\"first message\", flush=True); time.sleep(5); print(\"cant see me\", flush=True);").
|
||||||
Timeout(100 * time.Millisecond).
|
Timeout(100 * time.Millisecond).
|
||||||
@@ -105,7 +106,7 @@ func TestPartialRead(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestPartialReadStderr(t *testing.T) {
|
func TestPartialReadStderr(t *testing.T) {
|
||||||
res1, err := Runner("python").
|
res1, err := Runner("python3").
|
||||||
Arg("-c").
|
Arg("-c").
|
||||||
Arg("import sys; import time; print(\"first message\", file=sys.stderr, flush=True); time.sleep(5); print(\"cant see me\", file=sys.stderr, flush=True);").
|
Arg("import sys; import time; print(\"first message\", file=sys.stderr, flush=True); time.sleep(5); print(\"cant see me\", file=sys.stderr, flush=True);").
|
||||||
Timeout(100 * time.Millisecond).
|
Timeout(100 * time.Millisecond).
|
||||||
@@ -130,7 +131,7 @@ func TestPartialReadStderr(t *testing.T) {
|
|||||||
|
|
||||||
func TestReadUnflushedStdout(t *testing.T) {
|
func TestReadUnflushedStdout(t *testing.T) {
|
||||||
|
|
||||||
res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stdout, end='')").Run()
|
res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stdout, end='')").Run()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Errorf("%v", err)
|
t.Errorf("%v", err)
|
||||||
}
|
}
|
||||||
@@ -154,7 +155,7 @@ func TestReadUnflushedStdout(t *testing.T) {
|
|||||||
|
|
||||||
func TestReadUnflushedStderr(t *testing.T) {
|
func TestReadUnflushedStderr(t *testing.T) {
|
||||||
|
|
||||||
res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stderr, end='')").Run()
|
res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stderr, end='')").Run()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Errorf("%v", err)
|
t.Errorf("%v", err)
|
||||||
}
|
}
|
||||||
@@ -179,7 +180,7 @@ func TestReadUnflushedStderr(t *testing.T) {
|
|||||||
func TestPartialReadUnflushed(t *testing.T) {
|
func TestPartialReadUnflushed(t *testing.T) {
|
||||||
t.SkipNow()
|
t.SkipNow()
|
||||||
|
|
||||||
res1, err := Runner("python").
|
res1, err := Runner("python3").
|
||||||
Arg("-c").
|
Arg("-c").
|
||||||
Arg("import sys; import time; print(\"first message\", end=''); time.sleep(5); print(\"cant see me\", end='');").
|
Arg("import sys; import time; print(\"first message\", end=''); time.sleep(5); print(\"cant see me\", end='');").
|
||||||
Timeout(100 * time.Millisecond).
|
Timeout(100 * time.Millisecond).
|
||||||
@@ -205,7 +206,7 @@ func TestPartialReadUnflushed(t *testing.T) {
|
|||||||
func TestPartialReadUnflushedStderr(t *testing.T) {
|
func TestPartialReadUnflushedStderr(t *testing.T) {
|
||||||
t.SkipNow()
|
t.SkipNow()
|
||||||
|
|
||||||
res1, err := Runner("python").
|
res1, err := Runner("python3").
|
||||||
Arg("-c").
|
Arg("-c").
|
||||||
Arg("import sys; import time; print(\"first message\", file=sys.stderr, end=''); time.sleep(5); print(\"cant see me\", file=sys.stderr, end='');").
|
Arg("import sys; import time; print(\"first message\", file=sys.stderr, end=''); time.sleep(5); print(\"cant see me\", file=sys.stderr, end='');").
|
||||||
Timeout(100 * time.Millisecond).
|
Timeout(100 * time.Millisecond).
|
||||||
@@ -230,7 +231,7 @@ func TestPartialReadUnflushedStderr(t *testing.T) {
|
|||||||
|
|
||||||
func TestListener(t *testing.T) {
|
func TestListener(t *testing.T) {
|
||||||
|
|
||||||
res1, err := Runner("python").
|
res1, err := Runner("python3").
|
||||||
Arg("-c").
|
Arg("-c").
|
||||||
Arg("import sys;" +
|
Arg("import sys;" +
|
||||||
"import time;" +
|
"import time;" +
|
||||||
@@ -263,7 +264,7 @@ func TestListener(t *testing.T) {
|
|||||||
|
|
||||||
func TestLongStdout(t *testing.T) {
|
func TestLongStdout(t *testing.T) {
|
||||||
|
|
||||||
res1, err := Runner("python").
|
res1, err := Runner("python3").
|
||||||
Arg("-c").
|
Arg("-c").
|
||||||
Arg("import sys; import time; print(\"X\" * 125001 + \"\\n\"); print(\"Y\" * 125001 + \"\\n\"); print(\"Z\" * 125001 + \"\\n\");").
|
Arg("import sys; import time; print(\"X\" * 125001 + \"\\n\"); print(\"Y\" * 125001 + \"\\n\"); print(\"Z\" * 125001 + \"\\n\");").
|
||||||
Timeout(5000 * time.Millisecond).
|
Timeout(5000 * time.Millisecond).
|
||||||
@@ -289,16 +290,40 @@ func TestLongStdout(t *testing.T) {
|
|||||||
func TestFailOnTimeout(t *testing.T) {
|
func TestFailOnTimeout(t *testing.T) {
|
||||||
|
|
||||||
_, err := Runner("sleep").Arg("2").Timeout(200 * time.Millisecond).FailOnTimeout().Run()
|
_, err := Runner("sleep").Arg("2").Timeout(200 * time.Millisecond).FailOnTimeout().Run()
|
||||||
if err != ErrTimeout {
|
if !errors.Is(err, ErrTimeout) {
|
||||||
t.Errorf("wrong err := %v", err)
|
t.Errorf("wrong err := %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestFailOnStderr(t *testing.T) {
|
||||||
|
|
||||||
|
res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").FailOnStderr().Run()
|
||||||
|
if err == nil {
|
||||||
|
t.Errorf("no err")
|
||||||
|
}
|
||||||
|
if res1.CommandTimedOut {
|
||||||
|
t.Errorf("Timeout")
|
||||||
|
}
|
||||||
|
if res1.ExitCode != -1 {
|
||||||
|
t.Errorf("res1.ExitCode == %v", res1.ExitCode)
|
||||||
|
}
|
||||||
|
if res1.StdErr != "error" {
|
||||||
|
t.Errorf("res1.StdErr == '%v'", res1.StdErr)
|
||||||
|
}
|
||||||
|
if res1.StdOut != "" {
|
||||||
|
t.Errorf("res1.StdOut == '%v'", res1.StdOut)
|
||||||
|
}
|
||||||
|
if res1.StdCombined != "error\n" {
|
||||||
|
t.Errorf("res1.StdCombined == '%v'", res1.StdCombined)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
func TestFailOnExitcode(t *testing.T) {
|
func TestFailOnExitcode(t *testing.T) {
|
||||||
|
|
||||||
_, err := Runner("false").Timeout(200 * time.Millisecond).FailOnExitCode().Run()
|
_, err := Runner("false").Timeout(200 * time.Millisecond).FailOnExitCode().Run()
|
||||||
if err != ErrExitCode {
|
if !errors.Is(err, ErrExitCode) {
|
||||||
t.Errorf("wrong err := %v", err)
|
t.Errorf("wrong err := %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -32,8 +32,8 @@ func (pr *pipeReader) Read(listener []CommandListener) (string, string, string,
 	stdout := ""
 	go func() {
 		buf := make([]byte, 128)
-		for true {
-			n, out := pr.stdout.Read(buf)
+		for {
+			n, err := pr.stdout.Read(buf)
 			if n > 0 {
 				txt := string(buf[:n])
 				stdout += txt
@@ -42,11 +42,11 @@ func (pr *pipeReader) Read(listener []CommandListener) (string, string, string,
 					lstr.ReadRawStdout(buf[:n])
 				}
 			}
-			if out == io.EOF {
+			if err == io.EOF {
 				break
 			}
-			if out != nil {
-				errch <- out
+			if err != nil {
+				errch <- err
 				break
 			}
 		}
@@ -61,7 +61,7 @@ func (pr *pipeReader) Read(listener []CommandListener) (string, string, string,
 	stderr := ""
 	go func() {
 		buf := make([]byte, 128)
-		for true {
+		for {
 			n, err := pr.stderr.Read(buf)
 
 			if n > 0 {
@@ -41,12 +41,12 @@ func processEnvOverrides(rval reflect.Value, delim string, prefix string) error
 			continue
 		}
 
-		if rvfield.Kind() == reflect.Struct {
+		envkey, found := rsfield.Tag.Lookup("env")
+		if !found || envkey == "-" {
+			continue
+		}
 
-			envkey, found := rsfield.Tag.Lookup("env")
-			if !found || envkey == "-" {
-				continue
-			}
+		if rvfield.Kind() == reflect.Struct && rvfield.Type() != reflect.TypeOf(time.UnixMilli(0)) {
 
 			subPrefix := prefix
 			if envkey != "" {
@@ -57,10 +57,7 @@ func processEnvOverrides(rval reflect.Value, delim string, prefix string) error
 			if err != nil {
 				return err
 			}
-		}
-
-		envkey := rsfield.Tag.Get("env")
-		if envkey == "" || envkey == "-" {
 			continue
 		}
 
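Note: a sketch of the struct shapes this change affects; the type, field and tag names below are illustrative assumptions, not part of the diff. The visible change is that the `env` tag is now resolved before the struct check, and struct-kinded fields that are time values are no longer recursed into.

// Illustrative config struct for processEnvOverrides (names and delimiter behaviour are assumptions).
type serverConfig struct {
	Port    int       `env:"PORT"`
	Secret  string    `env:"-"`     // explicitly skipped
	StartAt time.Time `env:"START"` // struct kind, but no longer recursed into
	DB      struct {
		URI string `env:"URI"`
	} `env:"DB"` // nested struct: its fields are resolved with the "DB" prefix plus the delimiter
}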
@@ -3,6 +3,7 @@ package cryptext
 import (
 	"crypto/rand"
 	"crypto/sha256"
+	"crypto/sha512"
 	"encoding/base64"
 	"encoding/hex"
 	"errors"
@@ -14,14 +15,15 @@ import (
 	"strings"
 )
 
-const LatestPassHashVersion = 4
+const LatestPassHashVersion = 5
 
 // PassHash
-// - [v0]: plaintext password ( `0|...` )
-// - [v1]: sha256(plaintext)
-// - [v2]: seed | sha256<seed>(plaintext)
-// - [v3]: seed | sha256<seed>(plaintext) | [hex(totp)]
-// - [v4]: bcrypt(plaintext) | [hex(totp)]
+// - [v0]: plaintext password ( `0|...` ) // simple, used to write PW's directly in DB
+// - [v1]: sha256(plaintext) // simple hashing
+// - [v2]: seed | sha256<seed>(plaintext) // add seed
+// - [v3]: seed | sha256<seed>(plaintext) | [hex(totp)] // add TOTP support
+// - [v4]: bcrypt(plaintext) | [hex(totp)] // use proper bcrypt
+// - [v5]: bcrypt(sha512(plaintext)) | [hex(totp)] // hash pw before bcrypt (otherwise max pw-len = 72)
 type PassHash string
 
 func (ph PassHash) Valid() bool {
@@ -64,7 +66,6 @@ func (ph PassHash) Data() (_version int, _seed []byte, _payload []byte, _totp bo
 		return int(version), nil, payload, false, nil, true
 	}
 
-	//
 	if version == 2 {
 		if len(split) != 3 {
 			return -1, nil, nil, false, nil, false
@@ -109,7 +110,21 @@ func (ph PassHash) Data() (_version int, _seed []byte, _payload []byte, _totp bo
 		totp := false
 		totpsecret := make([]byte, 0)
 		if split[2] != "0" {
-			totpsecret, err = hex.DecodeString(split[3])
+			totpsecret, err = hex.DecodeString(split[2])
+			totp = true
+		}
+		return int(version), nil, payload, totp, totpsecret, true
+	}
+
+	if version == 5 {
+		if len(split) != 3 {
+			return -1, nil, nil, false, nil, false
+		}
+		payload := []byte(split[1])
+		totp := false
+		totpsecret := make([]byte, 0)
+		if split[2] != "0" {
+			totpsecret, err = hex.DecodeString(split[2])
 			totp = true
 		}
 		return int(version), nil, payload, totp, totpsecret, true
@@ -156,6 +171,14 @@ func (ph PassHash) Verify(plainpass string, totp *string) bool {
 		}
 	}
 
+	if version == 5 {
+		if !hastotp {
+			return bcrypt.CompareHashAndPassword(payload, hash512(plainpass)) == nil
+		} else {
+			return bcrypt.CompareHashAndPassword(payload, hash512(plainpass)) == nil && totpext.Validate(totpsecret, *totp)
+		}
+	}
+
 	return false
 }
 
@@ -209,6 +232,12 @@ func (ph PassHash) ClearTOTP() (PassHash, error) {
 		return PassHash(strings.Join(split, "|")), nil
 	}
 
+	if version == 5 {
+		split := strings.Split(string(ph), "|")
+		split[2] = "0"
+		return PassHash(strings.Join(split, "|")), nil
+	}
+
 	return "", errors.New("unknown version")
 }
 
@@ -242,6 +271,12 @@ func (ph PassHash) WithTOTP(totpSecret []byte) (PassHash, error) {
 		return PassHash(strings.Join(split, "|")), nil
 	}
 
+	if version == 5 {
+		split := strings.Split(string(ph), "|")
+		split[2] = hex.EncodeToString(totpSecret)
+		return PassHash(strings.Join(split, "|")), nil
+	}
+
 	return "", errors.New("unknown version")
 }
 
@@ -271,6 +306,10 @@ func (ph PassHash) Change(newPlainPass string) (PassHash, error) {
 		return HashPasswordV4(newPlainPass, langext.Conditional(hastotp, totpsecret, nil))
 	}
 
+	if version == 5 {
+		return HashPasswordV5(newPlainPass, langext.Conditional(hastotp, totpsecret, nil))
+	}
+
 	return "", errors.New("unknown version")
 }
 
@@ -279,7 +318,24 @@ func (ph PassHash) String() string {
 }
 
 func HashPassword(plainpass string, totpSecret []byte) (PassHash, error) {
-	return HashPasswordV4(plainpass, totpSecret)
+	return HashPasswordV5(plainpass, totpSecret)
+}
+
+func HashPasswordV5(plainpass string, totpSecret []byte) (PassHash, error) {
+	var strtotp string
+
+	if totpSecret == nil {
+		strtotp = "0"
+	} else {
+		strtotp = hex.EncodeToString(totpSecret)
+	}
+
+	payload, err := bcrypt.GenerateFromPassword(hash512(plainpass), bcrypt.MinCost)
+	if err != nil {
+		return "", err
+	}
+
+	return PassHash(fmt.Sprintf("5|%s|%s", string(payload), strtotp)), nil
}
 
 func HashPasswordV4(plainpass string, totpSecret []byte) (PassHash, error) {
@@ -340,6 +396,13 @@ func HashPasswordV0(plainpass string) (PassHash, error) {
 	return PassHash(fmt.Sprintf("0|%s", plainpass)), nil
 }
 
+func hash512(s string) []byte {
+	h := sha512.New()
+	h.Write([]byte(s))
+	bs := h.Sum(nil)
+	return bs
+}
+
 func hash256(s string) []byte {
 	h := sha256.New()
 	h.Write([]byte(s))
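Note: a short sketch of the new v5 flow added above (bcrypt over a sha512 pre-hash, so passwords longer than bcrypt's 72-byte limit keep working); assumes the goext/cryptext import, error handling trimmed.

func examplePassHashV5() bool {
	// Hash a new password with the v5 scheme and verify it later.
	ph, err := cryptext.HashPasswordV5("correct horse battery staple", nil) // nil = no TOTP secret
	if err != nil {
		panic(err)
	}
	return ph.Verify("correct horse battery staple", nil) // true for the matching plaintext
}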
210  cryptext/passHash_test.go  Normal file
@@ -0,0 +1,210 @@
package cryptext
|
||||||
|
|
||||||
|
import (
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/totpext"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/tst"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestPassHash1(t *testing.T) {
|
||||||
|
ph, err := HashPassword("test123", nil)
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertFalse(t, ph.HasTOTP())
|
||||||
|
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPassHashTOTP(t *testing.T) {
|
||||||
|
sec, err := totpext.GenerateSecret()
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
ph, err := HashPassword("test123", sec)
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertTrue(t, ph.HasTOTP())
|
||||||
|
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertFalse(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPassHashUpgrade_V0(t *testing.T) {
|
||||||
|
ph, err := HashPasswordV0("test123")
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertFalse(t, ph.HasTOTP())
|
||||||
|
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
|
||||||
|
ph, err = ph.Upgrade("test123")
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertFalse(t, ph.HasTOTP())
|
||||||
|
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPassHashUpgrade_V1(t *testing.T) {
|
||||||
|
ph, err := HashPasswordV1("test123")
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertFalse(t, ph.HasTOTP())
|
||||||
|
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
|
||||||
|
ph, err = ph.Upgrade("test123")
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertFalse(t, ph.HasTOTP())
|
||||||
|
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPassHashUpgrade_V2(t *testing.T) {
|
||||||
|
ph, err := HashPasswordV2("test123")
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertFalse(t, ph.HasTOTP())
|
||||||
|
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
|
||||||
|
ph, err = ph.Upgrade("test123")
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertFalse(t, ph.HasTOTP())
|
||||||
|
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPassHashUpgrade_V3(t *testing.T) {
|
||||||
|
ph, err := HashPasswordV3("test123", nil)
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertFalse(t, ph.HasTOTP())
|
||||||
|
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
|
||||||
|
ph, err = ph.Upgrade("test123")
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertFalse(t, ph.HasTOTP())
|
||||||
|
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPassHashUpgrade_V3_TOTP(t *testing.T) {
|
||||||
|
sec, err := totpext.GenerateSecret()
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
ph, err := HashPasswordV3("test123", sec)
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertTrue(t, ph.HasTOTP())
|
||||||
|
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertFalse(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
|
||||||
|
ph, err = ph.Upgrade("test123")
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertTrue(t, ph.HasTOTP())
|
||||||
|
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertFalse(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPassHashUpgrade_V4(t *testing.T) {
|
||||||
|
ph, err := HashPasswordV4("test123", nil)
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertFalse(t, ph.HasTOTP())
|
||||||
|
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
|
||||||
|
ph, err = ph.Upgrade("test123")
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertFalse(t, ph.HasTOTP())
|
||||||
|
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPassHashUpgrade_V4_TOTP(t *testing.T) {
|
||||||
|
sec, err := totpext.GenerateSecret()
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
ph, err := HashPasswordV4("test123", sec)
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertTrue(t, ph.HasTOTP())
|
||||||
|
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertFalse(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
|
||||||
|
ph, err = ph.Upgrade("test123")
|
||||||
|
tst.AssertNoErr(t, err)
|
||||||
|
|
||||||
|
tst.AssertTrue(t, ph.Valid())
|
||||||
|
tst.AssertTrue(t, ph.HasTOTP())
|
||||||
|
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())
|
||||||
|
|
||||||
|
tst.AssertFalse(t, ph.Verify("test123", nil))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
|
||||||
|
tst.AssertFalse(t, ph.Verify("test124", nil))
|
||||||
|
}
|
||||||
263  cryptext/pronouncablePassword.go  Normal file
@@ -0,0 +1,263 @@
package cryptext
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/rand"
|
||||||
|
"io"
|
||||||
|
"math/big"
|
||||||
|
mathrand "math/rand"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
ppStartChar = "BCDFGHJKLMNPQRSTVWXZ"
|
||||||
|
ppEndChar = "ABDEFIKMNORSTUXYZ"
|
||||||
|
ppVowel = "AEIOUY"
|
||||||
|
ppConsonant = "BCDFGHJKLMNPQRSTVWXZ"
|
||||||
|
ppSegmentLenMin = 3
|
||||||
|
ppSegmentLenMax = 7
|
||||||
|
ppMaxRepeatedVowel = 2
|
||||||
|
ppMaxRepeatedConsonant = 2
|
||||||
|
)
|
||||||
|
|
||||||
|
var ppContinuation = map[uint8]string{
|
||||||
|
'A': "BCDFGHJKLMNPRSTVWXYZ",
|
||||||
|
'B': "ADFIKLMNORSTUY",
|
||||||
|
'C': "AEIKOUY",
|
||||||
|
'D': "AEILORSUYZ",
|
||||||
|
'E': "BCDFGHJKLMNPRSTVWXYZ",
|
||||||
|
'F': "ADEGIKLOPRTUY",
|
||||||
|
'G': "ABDEFHILMNORSTUY",
|
||||||
|
'H': "AEIOUY",
|
||||||
|
'I': "BCDFGHJKLMNPRSTVWXZ",
|
||||||
|
'J': "AEIOUY",
|
||||||
|
'K': "ADEFHILMNORSTUY",
|
||||||
|
'L': "ADEFGIJKMNOPSTUVWYZ",
|
||||||
|
'M': "ABEFIKOPSTUY",
|
||||||
|
'N': "ABEFIKOPSTUY",
|
||||||
|
'O': "BCDFGHJKLMNPRSTVWXYZ",
|
||||||
|
'P': "AEFIJLORSTUY",
|
||||||
|
'Q': "AEIOUY",
|
||||||
|
'R': "ADEFGHIJKLMNOPSTUVYZ",
|
||||||
|
'S': "ACDEIKLOPTUYZ",
|
||||||
|
'T': "AEHIJOPRSUWY",
|
||||||
|
'U': "BCDFGHJKLMNPRSTVWXZ",
|
||||||
|
'V': "AEIOUY",
|
||||||
|
'W': "AEIOUY",
|
||||||
|
'X': "AEIOUY",
|
||||||
|
'Y': "ABCDFGHKLMNPRSTVXZ",
|
||||||
|
'Z': "AEILOTUY",
|
||||||
|
}
|
||||||
|
|
||||||
|
var ppLog2Map = map[int]float64{
|
||||||
|
1: 0.00000000,
|
||||||
|
2: 1.00000000,
|
||||||
|
3: 1.58496250,
|
||||||
|
4: 2.00000000,
|
||||||
|
5: 2.32192809,
|
||||||
|
6: 2.58496250,
|
||||||
|
7: 2.80735492,
|
||||||
|
8: 3.00000000,
|
||||||
|
9: 3.16992500,
|
||||||
|
10: 3.32192809,
|
||||||
|
11: 3.45943162,
|
||||||
|
12: 3.58496250,
|
||||||
|
13: 3.70043972,
|
||||||
|
14: 3.80735492,
|
||||||
|
15: 3.90689060,
|
||||||
|
16: 4.00000000,
|
||||||
|
17: 4.08746284,
|
||||||
|
18: 4.16992500,
|
||||||
|
19: 4.24792751,
|
||||||
|
20: 4.32192809,
|
||||||
|
21: 4.39231742,
|
||||||
|
22: 4.45943162,
|
||||||
|
23: 4.52356196,
|
||||||
|
24: 4.58496250,
|
||||||
|
25: 4.64385619,
|
||||||
|
26: 4.70043972,
|
||||||
|
27: 4.75488750,
|
||||||
|
28: 4.80735492,
|
||||||
|
29: 4.85798100,
|
||||||
|
30: 4.90689060,
|
||||||
|
31: 4.95419631,
|
||||||
|
32: 5.00000000,
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
ppVowelMap = ppMakeSet(ppVowel)
|
||||||
|
ppConsonantMap = ppMakeSet(ppConsonant)
|
||||||
|
ppEndCharMap = ppMakeSet(ppEndChar)
|
||||||
|
)
|
||||||
|
|
||||||
|
func ppMakeSet(v string) map[uint8]bool {
|
||||||
|
mp := make(map[uint8]bool, len(v))
|
||||||
|
for _, chr := range v {
|
||||||
|
mp[uint8(chr)] = true
|
||||||
|
}
|
||||||
|
return mp
|
||||||
|
}
|
||||||
|
|
||||||
|
func ppRandInt(rng io.Reader, max int) int {
|
||||||
|
v, err := rand.Int(rng, big.NewInt(int64(max)))
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return int(v.Int64())
|
||||||
|
}
|
||||||
|
|
||||||
|
func ppRand(rng io.Reader, chars string, entropy *float64) uint8 {
|
||||||
|
chr := chars[ppRandInt(rng, len(chars))]
|
||||||
|
|
||||||
|
*entropy = *entropy + ppLog2Map[len(chars)]
|
||||||
|
|
||||||
|
return chr
|
||||||
|
}
|
||||||
|
|
||||||
|
func ppCharType(chr uint8) (bool, bool) {
|
||||||
|
_, ok1 := ppVowelMap[chr]
|
||||||
|
_, ok2 := ppConsonantMap[chr]
|
||||||
|
|
||||||
|
return ok1, ok2
|
||||||
|
}
|
||||||
|
|
||||||
|
func ppCharsetRemove(cs string, set map[uint8]bool, allowEmpty bool) string {
|
||||||
|
result := ""
|
||||||
|
for _, chr := range cs {
|
||||||
|
if _, ok := set[uint8(chr)]; !ok {
|
||||||
|
result += string(chr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if result == "" && !allowEmpty {
|
||||||
|
return cs
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func ppCharsetFilter(cs string, set map[uint8]bool, allowEmpty bool) string {
|
||||||
|
result := ""
|
||||||
|
for _, chr := range cs {
|
||||||
|
if _, ok := set[uint8(chr)]; ok {
|
||||||
|
result += string(chr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if result == "" && !allowEmpty {
|
||||||
|
return cs
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func PronouncablePasswordExt(rng io.Reader, pwlen int) (string, float64) {
|
||||||
|
|
||||||
|
// kinda pseudo markov-chain - with a few extra rules and no weights...
|
||||||
|
|
||||||
|
if pwlen <= 0 {
|
||||||
|
return "", 0
|
||||||
|
}
|
||||||
|
|
||||||
|
vowelCount := 0
|
||||||
|
consoCount := 0
|
||||||
|
entropy := float64(0)
|
||||||
|
|
||||||
|
startChar := ppRand(rng, ppStartChar, &entropy)
|
||||||
|
|
||||||
|
result := string(startChar)
|
||||||
|
currentChar := startChar
|
||||||
|
|
||||||
|
isVowel, isConsonant := ppCharType(currentChar)
|
||||||
|
if isVowel {
|
||||||
|
vowelCount = 1
|
||||||
|
}
|
||||||
|
if isConsonant {
|
||||||
|
consoCount = ppMaxRepeatedConsonant
|
||||||
|
}
|
||||||
|
|
||||||
|
segmentLen := 1
|
||||||
|
|
||||||
|
segmentLenTarget := ppSegmentLenMin + ppRandInt(rng, ppSegmentLenMax-ppSegmentLenMin)
|
||||||
|
|
||||||
|
for len(result) < pwlen {
|
||||||
|
|
||||||
|
charset := ppContinuation[currentChar]
|
||||||
|
if vowelCount >= ppMaxRepeatedVowel {
|
||||||
|
charset = ppCharsetRemove(charset, ppVowelMap, false)
|
||||||
|
}
|
||||||
|
if consoCount >= ppMaxRepeatedConsonant {
|
||||||
|
charset = ppCharsetRemove(charset, ppConsonantMap, false)
|
||||||
|
}
|
||||||
|
|
||||||
|
lastOfSegment := false
|
||||||
|
newSegment := false
|
||||||
|
|
||||||
|
if len(result)+1 == pwlen {
|
||||||
|
// last of result
|
||||||
|
charset = ppCharsetFilter(charset, ppEndCharMap, false)
|
||||||
|
} else if segmentLen+1 == segmentLenTarget {
|
||||||
|
// last of segment
|
||||||
|
charsetNew := ppCharsetFilter(charset, ppEndCharMap, true)
|
||||||
|
if charsetNew != "" {
|
||||||
|
charset = charsetNew
|
||||||
|
lastOfSegment = true
|
||||||
|
}
|
||||||
|
} else if segmentLen >= segmentLenTarget {
|
||||||
|
// (perhaps) start of new segment
|
||||||
|
if _, ok := ppEndCharMap[currentChar]; ok {
|
||||||
|
charset = ppStartChar
|
||||||
|
newSegment = true
|
||||||
|
} else {
|
||||||
|
// continue segment for one more char to (hopefully) find an end-char
|
||||||
|
charsetNew := ppCharsetFilter(charset, ppEndCharMap, true)
|
||||||
|
if charsetNew != "" {
|
||||||
|
charset = charsetNew
|
||||||
|
lastOfSegment = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// normal continuation
|
||||||
|
}
|
||||||
|
|
||||||
|
newChar := ppRand(rng, charset, &entropy)
|
||||||
|
if lastOfSegment {
|
||||||
|
currentChar = newChar
|
||||||
|
segmentLen++
|
||||||
|
result += strings.ToLower(string(newChar))
|
||||||
|
} else if newSegment {
|
||||||
|
currentChar = newChar
|
||||||
|
segmentLen = 1
|
||||||
|
result += strings.ToUpper(string(newChar))
|
||||||
|
segmentLenTarget = ppSegmentLenMin + ppRandInt(rng, ppSegmentLenMax-ppSegmentLenMin)
|
||||||
|
vowelCount = 0
|
||||||
|
consoCount = 0
|
||||||
|
} else {
|
||||||
|
currentChar = newChar
|
||||||
|
segmentLen++
|
||||||
|
result += strings.ToLower(string(newChar))
|
||||||
|
}
|
||||||
|
|
||||||
|
isVowel, isConsonant := ppCharType(currentChar)
|
||||||
|
if isVowel {
|
||||||
|
vowelCount++
|
||||||
|
consoCount = 0
|
||||||
|
}
|
||||||
|
if isConsonant {
|
||||||
|
vowelCount = 0
|
||||||
|
if newSegment {
|
||||||
|
consoCount = ppMaxRepeatedConsonant
|
||||||
|
} else {
|
||||||
|
consoCount++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, entropy
|
||||||
|
}
|
||||||
|
|
||||||
|
func PronouncablePassword(len int) string {
|
||||||
|
v, _ := PronouncablePasswordExt(rand.Reader, len)
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
|
||||||
|
func PronouncablePasswordSeeded(seed int64, len int) string {
|
||||||
|
|
||||||
|
v, _ := PronouncablePasswordExt(mathrand.New(mathrand.NewSource(seed)), len)
|
||||||
|
return v
|
||||||
|
}
|
||||||
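Note: a quick usage sketch of the three entry points defined above (see also the tests in the next file): `PronouncablePassword` draws from crypto/rand, `PronouncablePasswordSeeded` is deterministic for a fixed seed, and the Ext variant also returns the accumulated entropy estimate. Assumes `crypto/rand`, `fmt` and the goext/cryptext import.

func examplePronouncable() {
	pw1 := cryptext.PronouncablePassword(12)           // crypto/rand backed
	pw2 := cryptext.PronouncablePasswordSeeded(42, 12) // deterministic for a fixed seed
	pw3, bits := cryptext.PronouncablePasswordExt(rand.Reader, 12)
	fmt.Printf("%s %s %s (~%.1f bits)\n", pw1, pw2, pw3, bits)
}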
35  cryptext/pronouncablePassword_test.go  Normal file
@@ -0,0 +1,35 @@
package cryptext

import (
	"fmt"
	"math/rand"
	"testing"
)

func TestPronouncablePasswordExt(t *testing.T) {
	for i := 0; i < 20; i++ {
		pw, entropy := PronouncablePasswordExt(rand.New(rand.NewSource(int64(i))), 16)
		fmt.Printf("[%.2f] => %s\n", entropy, pw)
	}
}

func TestPronouncablePasswordSeeded(t *testing.T) {
	for i := 0; i < 20; i++ {
		pw := PronouncablePasswordSeeded(int64(i), 8)
		fmt.Printf("%s\n", pw)
	}
}

func TestPronouncablePassword(t *testing.T) {
	for i := 0; i < 20; i++ {
		pw := PronouncablePassword(i + 1)
		fmt.Printf("%s\n", pw)
	}
}

func TestPronouncablePasswordWrongLen(t *testing.T) {
	PronouncablePassword(0)
	PronouncablePassword(-1)
	PronouncablePassword(-2)
	PronouncablePassword(-3)
}
@@ -4,6 +4,10 @@ import (
 	"go.mongodb.org/mongo-driver/mongo"
 )
 
+type RawFilter interface {
+	FilterQuery() mongo.Pipeline
+}
+
 type Filter interface {
 	FilterQuery() mongo.Pipeline
 	Pagination() (string, SortDirection, string, SortDirection)
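The new RawFilter interface drops the pagination requirement, so ad-hoc aggregation filters only need to supply a pipeline. A minimal sketch of an implementation follows; the package name, type name, and the `$match` stage are illustrative and not part of the commit.

```go
package filterexample

import (
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
)

// activeOnlyFilter satisfies the RawFilter contract by returning a fixed aggregation pipeline.
type activeOnlyFilter struct{}

func (f activeOnlyFilter) FilterQuery() mongo.Pipeline {
	return mongo.Pipeline{
		bson.D{{Key: "$match", Value: bson.D{{Key: "active", Value: true}}}},
	}
}
```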
dataext/syncMap.go (new file, +113 lines)
@@ -0,0 +1,113 @@
package dataext

import "sync"

type SyncMap[TKey comparable, TData any] struct {
	data map[TKey]TData
	lock sync.Mutex
}

func (s *SyncMap[TKey, TData]) Set(key TKey, data TData) {
	s.lock.Lock()
	defer s.lock.Unlock()

	if s.data == nil {
		s.data = make(map[TKey]TData)
	}

	s.data[key] = data
}

func (s *SyncMap[TKey, TData]) SetIfNotContains(key TKey, data TData) bool {
	s.lock.Lock()
	defer s.lock.Unlock()

	if s.data == nil {
		s.data = make(map[TKey]TData)
	}

	if _, existsInPreState := s.data[key]; existsInPreState {
		return false
	}

	s.data[key] = data

	return true
}

func (s *SyncMap[TKey, TData]) Get(key TKey) (TData, bool) {
	s.lock.Lock()
	defer s.lock.Unlock()

	if s.data == nil {
		s.data = make(map[TKey]TData)
	}

	if v, ok := s.data[key]; ok {
		return v, true
	} else {
		return *new(TData), false
	}
}

func (s *SyncMap[TKey, TData]) Delete(key TKey) bool {
	s.lock.Lock()
	defer s.lock.Unlock()

	if s.data == nil {
		s.data = make(map[TKey]TData)
	}

	_, ok := s.data[key]

	delete(s.data, key)

	return ok
}

func (s *SyncMap[TKey, TData]) Contains(key TKey) bool {
	s.lock.Lock()
	defer s.lock.Unlock()

	if s.data == nil {
		s.data = make(map[TKey]TData)
	}

	_, ok := s.data[key]

	return ok
}

func (s *SyncMap[TKey, TData]) GetAllKeys() []TKey {
	s.lock.Lock()
	defer s.lock.Unlock()

	if s.data == nil {
		s.data = make(map[TKey]TData)
	}

	r := make([]TKey, 0, len(s.data))

	for k := range s.data {
		r = append(r, k)
	}

	return r
}

func (s *SyncMap[TKey, TData]) GetAllValues() []TData {
	s.lock.Lock()
	defer s.lock.Unlock()

	if s.data == nil {
		s.data = make(map[TKey]TData)
	}

	r := make([]TData, 0, len(s.data))

	for _, v := range s.data {
		r = append(r, v)
	}

	return r
}
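A usage sketch for SyncMap (not part of the commit; the import path is inferred from the module layout). The zero value is usable because every method lazily allocates the underlying map while holding the mutex.

```go
package main

import (
	"fmt"
	"sync"

	"gogs.mikescher.com/BlackForestBytes/goext/dataext"
)

func main() {
	// Zero value is ready to use; the map is created lazily on first access.
	var cache dataext.SyncMap[string, int]

	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func(n int) {
			defer wg.Done()
			cache.Set(fmt.Sprintf("worker-%d", n), n*n)
		}(i)
	}
	wg.Wait()

	if v, ok := cache.Get("worker-2"); ok {
		fmt.Println("worker-2 =>", v) // 4
	}
	fmt.Println("keys:", cache.GetAllKeys())

	// SetIfNotContains only writes when the key is absent and reports whether it did.
	fmt.Println("inserted:", cache.SetIfNotContains("worker-2", -1)) // false
}
```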
@@ -7,6 +7,9 @@ type SyncSet[TData comparable] struct {
 	lock sync.Mutex
 }
 
+// Add adds `value` to the set
+// returns true if the value was actually inserted
+// returns false if the value already existed
 func (s *SyncSet[TData]) Add(value TData) bool {
 	s.lock.Lock()
 	defer s.lock.Unlock()
@@ -15,10 +18,10 @@ func (s *SyncSet[TData]) Add(value TData) bool {
 		s.data = make(map[TData]bool)
 	}
 
-	_, ok := s.data[value]
+	_, existsInPreState := s.data[value]
 	s.data[value] = true
 
-	return !ok
+	return !existsInPreState
 }
 
 func (s *SyncSet[TData]) AddAll(values []TData) {
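Because Add reports whether the value was newly inserted (the `existsInPreState` rename above makes that explicit), it can drive first-occurrence filtering directly. A small sketch, assuming the zero value of SyncSet is usable the same way as SyncMap:

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/dataext"
)

func main() {
	var seen dataext.SyncSet[string]

	input := []string{"a", "b", "a", "c", "b"}

	// Add returns true only for the first occurrence of each value.
	unique := make([]string, 0, len(input))
	for _, v := range input {
		if seen.Add(v) {
			unique = append(unique, v)
		}
	}
	fmt.Println(unique) // [a b c]
}
```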
dataext/tuple.go (new file, +170 lines)
@@ -0,0 +1,170 @@
package dataext

type ValueGroup interface {
	TupleLength() int
	TupleValues() []any
}

// ----------------------------------------------------------------------------

type Single[T1 any] struct {
	V1 T1
}

func (s Single[T1]) TupleLength() int {
	return 1
}

func (s Single[T1]) TupleValues() []any {
	return []any{s.V1}
}

// ----------------------------------------------------------------------------

type Tuple[T1 any, T2 any] struct {
	V1 T1
	V2 T2
}

func (t Tuple[T1, T2]) TupleLength() int {
	return 2
}

func (t Tuple[T1, T2]) TupleValues() []any {
	return []any{t.V1, t.V2}
}

// ----------------------------------------------------------------------------

type Triple[T1 any, T2 any, T3 any] struct {
	V1 T1
	V2 T2
	V3 T3
}

func (t Triple[T1, T2, T3]) TupleLength() int {
	return 3
}

func (t Triple[T1, T2, T3]) TupleValues() []any {
	return []any{t.V1, t.V2, t.V3}
}

// ----------------------------------------------------------------------------

type Quadruple[T1 any, T2 any, T3 any, T4 any] struct {
	V1 T1
	V2 T2
	V3 T3
	V4 T4
}

func (t Quadruple[T1, T2, T3, T4]) TupleLength() int {
	return 4
}

func (t Quadruple[T1, T2, T3, T4]) TupleValues() []any {
	return []any{t.V1, t.V2, t.V3, t.V4}
}

// ----------------------------------------------------------------------------

type Quintuple[T1 any, T2 any, T3 any, T4 any, T5 any] struct {
	V1 T1
	V2 T2
	V3 T3
	V4 T4
	V5 T5
}

func (t Quintuple[T1, T2, T3, T4, T5]) TupleLength() int {
	return 5
}

func (t Quintuple[T1, T2, T3, T4, T5]) TupleValues() []any {
	return []any{t.V1, t.V2, t.V3, t.V4, t.V5}
}

// ----------------------------------------------------------------------------

type Sextuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any] struct {
	V1 T1
	V2 T2
	V3 T3
	V4 T4
	V5 T5
	V6 T6
}

func (t Sextuple[T1, T2, T3, T4, T5, T6]) TupleLength() int {
	return 6
}

func (t Sextuple[T1, T2, T3, T4, T5, T6]) TupleValues() []any {
	return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6}
}

// ----------------------------------------------------------------------------

type Septuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any] struct {
	V1 T1
	V2 T2
	V3 T3
	V4 T4
	V5 T5
	V6 T6
	V7 T7
}

func (t Septuple[T1, T2, T3, T4, T5, T6, T7]) TupleLength() int {
	return 7
}

func (t Septuple[T1, T2, T3, T4, T5, T6, T7]) TupleValues() []any {
	return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7}
}

// ----------------------------------------------------------------------------

type Octuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any] struct {
	V1 T1
	V2 T2
	V3 T3
	V4 T4
	V5 T5
	V6 T6
	V7 T7
	V8 T8
}

func (t Octuple[T1, T2, T3, T4, T5, T6, T7, T8]) TupleLength() int {
	return 8
}

func (t Octuple[T1, T2, T3, T4, T5, T6, T7, T8]) TupleValues() []any {
	return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8}
}

// ----------------------------------------------------------------------------

type Nonuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any] struct {
	V1 T1
	V2 T2
	V3 T3
	V4 T4
	V5 T5
	V6 T6
	V7 T7
	V8 T8
	V9 T9
}

func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleLength() int {
	return 9
}

func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleValues() []any {
	return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8, t.V9}
}
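A short sketch of how the tuple types serve as lightweight multi-value containers (the `minMax` helper is invented for illustration):

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/dataext"
)

// minMax returns both extremes as one value, which is convenient for
// channels, maps, or slices of results.
func minMax(vals []int) dataext.Tuple[int, int] {
	mn, mx := vals[0], vals[0]
	for _, v := range vals[1:] {
		if v < mn {
			mn = v
		}
		if v > mx {
			mx = v
		}
	}
	return dataext.Tuple[int, int]{V1: mn, V2: mx}
}

func main() {
	t := minMax([]int{4, -1, 9, 3})
	fmt.Println(t.V1, t.V2)         // -1 9
	fmt.Println(t.TupleLength())    // 2
	fmt.Println(t.TupleValues()...) // -1 9
}
```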
enums/enum.go (new file, +31 lines)
@@ -0,0 +1,31 @@
package enums

type Enum interface {
	Valid() bool
	ValuesAny() []any
	ValuesMeta() []EnumMetaValue
	VarName() string
}

type StringEnum interface {
	Enum
	String() string
}

type DescriptionEnum interface {
	Enum
	Description() string
	DescriptionMeta() EnumDescriptionMetaValue
}

type EnumMetaValue struct {
	VarName     string  `json:"varName"`
	Value       Enum    `json:"value"`
	Description *string `json:"description"`
}

type EnumDescriptionMetaValue struct {
	VarName     string `json:"varName"`
	Value       Enum   `json:"value"`
	Description string `json:"description"`
}
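A sketch of a type satisfying the new StringEnum contract; the `color` enum and its values are invented for illustration and are not part of the commit.

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/enums"
)

// color is a hypothetical string-backed enum implementing enums.StringEnum.
type color string

const (
	ColorRed   color = "RED"
	ColorGreen color = "GREEN"
)

var allColors = []color{ColorRed, ColorGreen}

func (c color) Valid() bool {
	for _, v := range allColors {
		if v == c {
			return true
		}
	}
	return false
}

func (c color) String() string { return string(c) }

func (c color) VarName() string {
	switch c {
	case ColorRed:
		return "ColorRed"
	case ColorGreen:
		return "ColorGreen"
	default:
		return ""
	}
}

func (c color) ValuesAny() []any {
	r := make([]any, 0, len(allColors))
	for _, v := range allColors {
		r = append(r, v)
	}
	return r
}

func (c color) ValuesMeta() []enums.EnumMetaValue {
	r := make([]enums.EnumMetaValue, 0, len(allColors))
	for _, v := range allColors {
		r = append(r, enums.EnumMetaValue{VarName: v.VarName(), Value: v, Description: nil})
	}
	return r
}

func main() {
	var e enums.StringEnum = ColorGreen
	fmt.Println(e.String(), e.Valid()) // GREEN true
}
```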
exerr/builder.go (new file, +485 lines)
@@ -0,0 +1,485 @@
|
|||||||
|
package exerr
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/rs/zerolog"
|
||||||
|
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/enums"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"runtime/debug"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
//
// ==== USAGE =====
//
// If some method returns an error, always wrap it into an exerror:
//     value, err := do_something(..)
//     if err != nil {
//         return nil, exerror.Wrap(err, "do something failed").Build()
//     }
//
// If possible, add metadata to the error (e.g. the id that was not found, ...); the methods are the same as in zerolog:
//     return nil, exerror.Wrap(err, "do something failed").Str("someid", id).Int("count", in.Count).Build()
//
// You can change the errortype with `.User()` and `.System()` (User-errors are 400 and System-errors 500).
// You can also manually set the statuscode with `.WithStatuscode(http.StatusNotFound)`.
// You can set the type with `WithType(..)`.
//
// New errors (that don't wrap an existing err object) are created with New:
//     return nil, exerror.New(exerror.TypeInternal, "something went horribly wrong").Build()
// You can either use an existing ErrorType, the "catch-all" ErrInternal, or add your own ErrType in consts.go.
//
// All errors should be handled in one of the following four ways:
//   - Return the error to the caller and let him handle it
//     (also auto-prints the error to the log)
//     => Wrap/New + Build
//   - Print the error
//     (also auto-sends it to the error-service)
//     This is useful for errors that happen asynchronously or are non-fatal for the current request
//     => Wrap/New + Print
//   - Return the error to the REST-API caller
//     (also auto-prints the error to the log)
//     (also auto-sends it to the error-service)
//     => Wrap/New + Output
//   - Print and stop the service
//     (also auto-sends it to the error-service)
//     => Wrap/New + Fatal
//
|
|
||||||
|
var stackSkipLogger zerolog.Logger
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
cw := zerolog.ConsoleWriter{
|
||||||
|
Out: os.Stdout,
|
||||||
|
TimeFormat: "2006-01-02 15:04:05 Z07:00",
|
||||||
|
}
|
||||||
|
|
||||||
|
multi := zerolog.MultiLevelWriter(cw)
|
||||||
|
stackSkipLogger = zerolog.New(multi).With().Timestamp().CallerWithSkipFrameCount(4).Logger()
|
||||||
|
}
|
||||||
|
|
||||||
|
type Builder struct {
|
||||||
|
wrappedErr error
|
||||||
|
errorData *ExErr
|
||||||
|
containsGinData bool
|
||||||
|
noLog bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func Get(err error) *Builder {
|
||||||
|
return &Builder{errorData: FromError(err)}
|
||||||
|
}
|
||||||
|
|
||||||
|
func New(t ErrorType, msg string) *Builder {
|
||||||
|
return &Builder{errorData: newExErr(CatSystem, t, msg)}
|
||||||
|
}
|
||||||
|
|
||||||
|
func Wrap(err error, msg string) *Builder {
|
||||||
|
if err == nil {
|
||||||
|
return &Builder{errorData: newExErr(CatSystem, TypeInternal, msg)} // prevent NPE if we call Wrap with err==nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if !pkgconfig.RecursiveErrors {
|
||||||
|
v := FromError(err)
|
||||||
|
v.Message = msg
|
||||||
|
return &Builder{wrappedErr: err, errorData: v}
|
||||||
|
}
|
||||||
|
return &Builder{wrappedErr: err, errorData: wrapExErr(FromError(err), msg, CatWrap, 1)}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
func (b *Builder) WithType(t ErrorType) *Builder {
|
||||||
|
b.errorData.Type = t
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) WithStatuscode(status int) *Builder {
|
||||||
|
b.errorData.StatusCode = &status
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) WithMessage(msg string) *Builder {
|
||||||
|
b.errorData.Message = msg
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
// ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
// Err changes the Severity to ERROR (default)
|
||||||
|
// The error will be:
|
||||||
|
//
|
||||||
|
// - On Build():
|
||||||
|
//
|
||||||
|
// - Short-Logged as Err
|
||||||
|
//
|
||||||
|
// - On Print():
|
||||||
|
//
|
||||||
|
// - Logged as Err
|
||||||
|
//
|
||||||
|
// - Send to the error-service
|
||||||
|
//
|
||||||
|
// - On Output():
|
||||||
|
//
|
||||||
|
// - Logged as Err
|
||||||
|
//
|
||||||
|
// - Send to the error-service
|
||||||
|
func (b *Builder) Err() *Builder {
|
||||||
|
b.errorData.Severity = SevErr
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
// Warn changes the Severity to WARN
|
||||||
|
// The error will be:
|
||||||
|
//
|
||||||
|
// - On Build():
|
||||||
|
//
|
||||||
|
// - -(nothing)-
|
||||||
|
//
|
||||||
|
// - On Print():
|
||||||
|
//
|
||||||
|
// - Short-Logged as Warn
|
||||||
|
//
|
||||||
|
// - On Output():
|
||||||
|
//
|
||||||
|
// - Logged as Warn
|
||||||
|
func (b *Builder) Warn() *Builder {
|
||||||
|
b.errorData.Severity = SevWarn
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
// Info changes the Severity to INFO
|
||||||
|
// The error will be:
|
||||||
|
//
|
||||||
|
// - On Build():
|
||||||
|
//
|
||||||
|
// - -(nothing)-
|
||||||
|
//
|
||||||
|
// - On Print():
|
||||||
|
//
|
||||||
|
// - -(nothing)-
|
||||||
|
//
|
||||||
|
// - On Output():
|
||||||
|
//
|
||||||
|
// - -(nothing)-
|
||||||
|
func (b *Builder) Info() *Builder {
|
||||||
|
b.errorData.Severity = SevInfo
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
// ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
// User sets the Category to CatUser
|
||||||
|
//
|
||||||
|
// Errors with category
|
||||||
|
func (b *Builder) User() *Builder {
|
||||||
|
b.errorData.Category = CatUser
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) System() *Builder {
|
||||||
|
b.errorData.Category = CatSystem
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
// ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
func (b *Builder) NoLog() *Builder {
|
||||||
|
b.noLog = true
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
// ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
func (b *Builder) Id(key string, val fmt.Stringer) *Builder {
|
||||||
|
return b.addMeta(key, MDTID, newIDWrap(val))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) StrPtr(key string, val *string) *Builder {
|
||||||
|
return b.addMeta(key, MDTStringPtr, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Str(key string, val string) *Builder {
|
||||||
|
return b.addMeta(key, MDTString, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Int(key string, val int) *Builder {
|
||||||
|
return b.addMeta(key, MDTInt, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Int8(key string, val int8) *Builder {
|
||||||
|
return b.addMeta(key, MDTInt8, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Int16(key string, val int16) *Builder {
|
||||||
|
return b.addMeta(key, MDTInt16, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Int32(key string, val int32) *Builder {
|
||||||
|
return b.addMeta(key, MDTInt32, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Int64(key string, val int64) *Builder {
|
||||||
|
return b.addMeta(key, MDTInt64, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Float32(key string, val float32) *Builder {
|
||||||
|
return b.addMeta(key, MDTFloat32, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Float64(key string, val float64) *Builder {
|
||||||
|
return b.addMeta(key, MDTFloat64, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Bool(key string, val bool) *Builder {
|
||||||
|
return b.addMeta(key, MDTBool, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Bytes(key string, val []byte) *Builder {
|
||||||
|
return b.addMeta(key, MDTBytes, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) ObjectID(key string, val primitive.ObjectID) *Builder {
|
||||||
|
return b.addMeta(key, MDTObjectID, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Time(key string, val time.Time) *Builder {
|
||||||
|
return b.addMeta(key, MDTTime, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Dur(key string, val time.Duration) *Builder {
|
||||||
|
return b.addMeta(key, MDTDuration, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Strs(key string, val []string) *Builder {
|
||||||
|
return b.addMeta(key, MDTStringArray, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Ints(key string, val []int) *Builder {
|
||||||
|
return b.addMeta(key, MDTIntArray, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Ints32(key string, val []int32) *Builder {
|
||||||
|
return b.addMeta(key, MDTInt32Array, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Type(key string, cls interface{}) *Builder {
|
||||||
|
return b.addMeta(key, MDTString, fmt.Sprintf("%T", cls))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Interface(key string, val interface{}) *Builder {
|
||||||
|
return b.addMeta(key, MDTAny, newAnyWrap(val))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Any(key string, val any) *Builder {
|
||||||
|
return b.addMeta(key, MDTAny, newAnyWrap(val))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Stringer(key string, val fmt.Stringer) *Builder {
|
||||||
|
if langext.IsNil(val) {
|
||||||
|
return b.addMeta(key, MDTString, "(!nil)")
|
||||||
|
} else {
|
||||||
|
return b.addMeta(key, MDTString, val.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Enum(key string, val enums.Enum) *Builder {
|
||||||
|
return b.addMeta(key, MDTEnum, newEnumWrap(val))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Stack() *Builder {
|
||||||
|
return b.addMeta("@Stack", MDTString, string(debug.Stack()))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Errs(key string, val []error) *Builder {
|
||||||
|
for i, valerr := range val {
|
||||||
|
b.addMeta(fmt.Sprintf("%v[%v]", key, i), MDTString, Get(valerr).errorData.FormatLog(LogPrintFull))
|
||||||
|
}
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) *Builder {
|
||||||
|
if v := ctx.Value("start_timestamp"); v != nil {
|
||||||
|
if t, ok := v.(time.Time); ok {
|
||||||
|
b.Time("ctx.startTimestamp", t)
|
||||||
|
b.Time("ctx.endTimestamp", time.Now())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
b.Str("gin.method", req.Method)
|
||||||
|
b.Str("gin.path", g.FullPath())
|
||||||
|
b.Strs("gin.header", extractHeader(g.Request.Header))
|
||||||
|
if req.URL != nil {
|
||||||
|
b.Str("gin.url", req.URL.String())
|
||||||
|
}
|
||||||
|
if ctxVal := g.GetString("apiversion"); ctxVal != "" {
|
||||||
|
b.Str("gin.context.apiversion", ctxVal)
|
||||||
|
}
|
||||||
|
if ctxVal := g.GetString("uid"); ctxVal != "" {
|
||||||
|
b.Str("gin.context.uid", ctxVal)
|
||||||
|
}
|
||||||
|
if ctxVal := g.GetString("fcmId"); ctxVal != "" {
|
||||||
|
b.Str("gin.context.fcmid", ctxVal)
|
||||||
|
}
|
||||||
|
if ctxVal := g.GetString("reqid"); ctxVal != "" {
|
||||||
|
b.Str("gin.context.reqid", ctxVal)
|
||||||
|
}
|
||||||
|
if req.Method != "GET" && req.Body != nil {
|
||||||
|
|
||||||
|
if req.Header.Get("Content-Type") == "application/json" {
|
||||||
|
if brc, ok := req.Body.(dataext.BufferedReadCloser); ok {
|
||||||
|
if bin, err := brc.BufferedAll(); err == nil {
|
||||||
|
if len(bin) < 16*1024 {
|
||||||
|
var prettyJSON bytes.Buffer
|
||||||
|
err = json.Indent(&prettyJSON, bin, "", " ")
|
||||||
|
if err == nil {
|
||||||
|
b.Str("gin.body", string(prettyJSON.Bytes()))
|
||||||
|
} else {
|
||||||
|
b.Bytes("gin.body", bin)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.Header.Get("Content-Type") == "multipart/form-data" || req.Header.Get("Content-Type") == "x-www-form-urlencoded" {
|
||||||
|
if brc, ok := req.Body.(dataext.BufferedReadCloser); ok {
|
||||||
|
if bin, err := brc.BufferedAll(); err == nil {
|
||||||
|
if len(bin) < 16*1024 {
|
||||||
|
b.Bytes("gin.body", bin)
|
||||||
|
} else {
|
||||||
|
b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
b.containsGinData = true
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
func formatHeader(header map[string][]string) string {
|
||||||
|
ml := 1
|
||||||
|
for k := range header {
|
||||||
|
if len(k) > ml {
|
||||||
|
ml = len(k)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
r := ""
|
||||||
|
for k, v := range header {
|
||||||
|
if r != "" {
|
||||||
|
r += "\n"
|
||||||
|
}
|
||||||
|
for _, hval := range v {
|
||||||
|
value := hval
|
||||||
|
value = strings.ReplaceAll(value, "\n", "\\n")
|
||||||
|
value = strings.ReplaceAll(value, "\r", "\\r")
|
||||||
|
value = strings.ReplaceAll(value, "\t", "\\t")
|
||||||
|
r += langext.StrPadRight(k, " ", ml) + " := " + value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
func extractHeader(header map[string][]string) []string {
|
||||||
|
r := make([]string, 0, len(header))
|
||||||
|
for k, v := range header {
|
||||||
|
for _, hval := range v {
|
||||||
|
value := hval
|
||||||
|
value = strings.ReplaceAll(value, "\n", "\\n")
|
||||||
|
value = strings.ReplaceAll(value, "\r", "\\r")
|
||||||
|
value = strings.ReplaceAll(value, "\t", "\\t")
|
||||||
|
r = append(r, k+": "+value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
// ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
// Build creates a new error, ready to pass up the stack.
// If the error is not SevWarn or SevInfo it also gets logged (in short form, without stacktrace) to stdout.
// Can be globally configured with ZeroLogErrTraces and ZeroLogAllTraces.
// Can be locally suppressed with Builder.NoLog()
|
||||||
|
func (b *Builder) Build() error {
|
||||||
|
warnOnPkgConfigNotInitialized()
|
||||||
|
|
||||||
|
if pkgconfig.DisableErrorWrapping && b.wrappedErr != nil {
|
||||||
|
return b.wrappedErr
|
||||||
|
}
|
||||||
|
|
||||||
|
if pkgconfig.ZeroLogErrTraces && !b.noLog && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) {
|
||||||
|
b.errorData.ShortLog(stackSkipLogger.Error())
|
||||||
|
} else if pkgconfig.ZeroLogAllTraces && !b.noLog {
|
||||||
|
b.errorData.ShortLog(stackSkipLogger.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
b.errorData.CallListener(MethodBuild)
|
||||||
|
|
||||||
|
return b.errorData
|
||||||
|
}
|
||||||
|
|
||||||
|
// Output prints the error onto the gin stdout.
|
||||||
|
// The error also gets printed to stdout/stderr
|
||||||
|
// If the error is SevErr|SevFatal we also send it to the error-service
|
||||||
|
func (b *Builder) Output(ctx context.Context, g *gin.Context) {
|
||||||
|
if !b.containsGinData && g.Request != nil {
|
||||||
|
// Auto-Add gin metadata if the caller hasn't already done it
|
||||||
|
b.GinReq(ctx, g, g.Request)
|
||||||
|
}
|
||||||
|
|
||||||
|
b.errorData.Output(g)
|
||||||
|
|
||||||
|
if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal {
|
||||||
|
b.errorData.Log(stackSkipLogger.Error())
|
||||||
|
} else if b.errorData.Severity == SevWarn {
|
||||||
|
b.errorData.Log(stackSkipLogger.Warn())
|
||||||
|
}
|
||||||
|
|
||||||
|
b.errorData.CallListener(MethodOutput)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print prints the error
|
||||||
|
// If the error is SevErr we also send it to the error-service
|
||||||
|
func (b *Builder) Print() {
|
||||||
|
if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal {
|
||||||
|
b.errorData.Log(stackSkipLogger.Error())
|
||||||
|
} else if b.errorData.Severity == SevWarn {
|
||||||
|
b.errorData.ShortLog(stackSkipLogger.Warn())
|
||||||
|
}
|
||||||
|
|
||||||
|
b.errorData.CallListener(MethodPrint)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) Format(level LogPrintLevel) string {
|
||||||
|
return b.errorData.FormatLog(level)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fatal prints the error and terminates the program
|
||||||
|
// If the error is SevErr we also send it to the error-service
|
||||||
|
func (b *Builder) Fatal() {
|
||||||
|
b.errorData.Severity = SevFatal
|
||||||
|
b.errorData.Log(stackSkipLogger.WithLevel(zerolog.FatalLevel))
|
||||||
|
|
||||||
|
b.errorData.CallListener(MethodFatal)
|
||||||
|
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
func (b *Builder) addMeta(key string, mdtype metaDataType, val interface{}) *Builder {
|
||||||
|
b.errorData.Meta.add(key, mdtype, val)
|
||||||
|
return b
|
||||||
|
}
|
||||||
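Taken together, a typical call site wraps a lower-level error, attaches metadata, and returns the built error to the caller. A minimal sketch using only the builder methods shown above; the file path, config value, and error handling are illustrative, not part of the commit.

```go
package main

import (
	"fmt"
	"os"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func loadConfig(path string) (string, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		// Wrap the underlying error, attach metadata, and build the exerr chain.
		return "", exerr.Wrap(err, "failed to read config file").
			Str("path", path).
			WithType(exerr.TypeInternal).
			Build()
	}
	return string(data), nil
}

func main() {
	// Init must run before any error is built; only one flag is overridden here.
	exerr.Init(exerr.ErrorPackageConfigInit{ZeroLogErrTraces: langext.Ptr(false)})

	if _, err := loadConfig("/does/not/exist.json"); err != nil {
		fmt.Println(err)
	}
}
```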
exerr/constructor.go (new file, +204 lines)
@@ -0,0 +1,204 @@
|
|||||||
|
package exerr
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||||
|
"reflect"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
var reflectTypeStr = reflect.TypeOf("")
|
||||||
|
|
||||||
|
func FromError(err error) *ExErr {
|
||||||
|
if verr, ok := err.(*ExErr); ok {
|
||||||
|
// A simple ExErr
|
||||||
|
return verr
|
||||||
|
}
|
||||||
|
|
||||||
|
// A foreign error (eg a MongoDB exception)
|
||||||
|
return &ExErr{
|
||||||
|
UniqueID: newID(),
|
||||||
|
Category: CatForeign,
|
||||||
|
Type: TypeInternal,
|
||||||
|
Severity: SevErr,
|
||||||
|
Timestamp: time.Time{},
|
||||||
|
StatusCode: nil,
|
||||||
|
Message: err.Error(),
|
||||||
|
WrappedErrType: fmt.Sprintf("%T", err),
|
||||||
|
WrappedErr: err,
|
||||||
|
Caller: "",
|
||||||
|
OriginalError: nil,
|
||||||
|
Meta: getForeignMeta(err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func newExErr(cat ErrorCategory, errtype ErrorType, msg string) *ExErr {
|
||||||
|
return &ExErr{
|
||||||
|
UniqueID: newID(),
|
||||||
|
Category: cat,
|
||||||
|
Type: errtype,
|
||||||
|
Severity: SevErr,
|
||||||
|
Timestamp: time.Now(),
|
||||||
|
StatusCode: nil,
|
||||||
|
Message: msg,
|
||||||
|
WrappedErrType: "",
|
||||||
|
WrappedErr: nil,
|
||||||
|
Caller: callername(2),
|
||||||
|
OriginalError: nil,
|
||||||
|
Meta: make(map[string]MetaValue),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func wrapExErr(e *ExErr, msg string, cat ErrorCategory, stacktraceskip int) *ExErr {
|
||||||
|
return &ExErr{
|
||||||
|
UniqueID: newID(),
|
||||||
|
Category: cat,
|
||||||
|
Type: TypeWrap,
|
||||||
|
Severity: SevErr,
|
||||||
|
Timestamp: time.Now(),
|
||||||
|
StatusCode: e.StatusCode,
|
||||||
|
Message: msg,
|
||||||
|
WrappedErrType: "",
|
||||||
|
WrappedErr: nil,
|
||||||
|
Caller: callername(1 + stacktraceskip),
|
||||||
|
OriginalError: e,
|
||||||
|
Meta: make(map[string]MetaValue),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func getForeignMeta(err error) (mm MetaMap) {
|
||||||
|
mm = make(map[string]MetaValue)
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
if panicerr := recover(); panicerr != nil {
|
||||||
|
New(TypePanic, "Panic while trying to get foreign meta").
|
||||||
|
Str("source", err.Error()).
|
||||||
|
Interface("panic-object", panicerr).
|
||||||
|
Stack().
|
||||||
|
Print()
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
rval := reflect.ValueOf(err)
|
||||||
|
if rval.Kind() == reflect.Interface || rval.Kind() == reflect.Ptr {
|
||||||
|
rval = reflect.ValueOf(err).Elem()
|
||||||
|
}
|
||||||
|
|
||||||
|
mm.add("foreign.errortype", MDTString, rval.Type().String())
|
||||||
|
|
||||||
|
for k, v := range addMetaPrefix("foreign", getReflectedMetaValues(err, 8)) {
|
||||||
|
mm[k] = v
|
||||||
|
}
|
||||||
|
|
||||||
|
return mm
|
||||||
|
}
|
||||||
|
|
||||||
|
func getReflectedMetaValues(value interface{}, remainingDepth int) map[string]MetaValue {
|
||||||
|
|
||||||
|
if remainingDepth <= 0 {
|
||||||
|
return map[string]MetaValue{}
|
||||||
|
}
|
||||||
|
|
||||||
|
if langext.IsNil(value) {
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTNil, Value: nil}}
|
||||||
|
}
|
||||||
|
|
||||||
|
rval := reflect.ValueOf(value)
|
||||||
|
|
||||||
|
if rval.Type().Kind() == reflect.Ptr {
|
||||||
|
|
||||||
|
if rval.IsNil() {
|
||||||
|
return map[string]MetaValue{"*": {DataType: MDTNil, Value: nil}}
|
||||||
|
}
|
||||||
|
|
||||||
|
elem := rval.Elem()
|
||||||
|
|
||||||
|
return addMetaPrefix("*", getReflectedMetaValues(elem.Interface(), remainingDepth-1))
|
||||||
|
}
|
||||||
|
|
||||||
|
if !rval.CanInterface() {
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTString, Value: "<<no-interface>>"}}
|
||||||
|
}
|
||||||
|
|
||||||
|
raw := rval.Interface()
|
||||||
|
|
||||||
|
switch ifraw := raw.(type) {
|
||||||
|
case time.Time:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTTime, Value: ifraw}}
|
||||||
|
case time.Duration:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTDuration, Value: ifraw}}
|
||||||
|
case int:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTInt, Value: ifraw}}
|
||||||
|
case int8:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTInt8, Value: ifraw}}
|
||||||
|
case int16:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTInt16, Value: ifraw}}
|
||||||
|
case int32:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTInt32, Value: ifraw}}
|
||||||
|
case int64:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTInt64, Value: ifraw}}
|
||||||
|
case string:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTString, Value: ifraw}}
|
||||||
|
case bool:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTBool, Value: ifraw}}
|
||||||
|
case []byte:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTBytes, Value: ifraw}}
|
||||||
|
case float32:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTFloat32, Value: ifraw}}
|
||||||
|
case float64:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTFloat64, Value: ifraw}}
|
||||||
|
case []int:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTIntArray, Value: ifraw}}
|
||||||
|
case []int32:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTInt32Array, Value: ifraw}}
|
||||||
|
case primitive.ObjectID:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTObjectID, Value: ifraw}}
|
||||||
|
case []string:
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTStringArray, Value: ifraw}}
|
||||||
|
}
|
||||||
|
|
||||||
|
if rval.Type().Kind() == reflect.Struct {
|
||||||
|
m := make(map[string]MetaValue)
|
||||||
|
for i := 0; i < rval.NumField(); i++ {
|
||||||
|
fieldtype := rval.Type().Field(i)
|
||||||
|
|
||||||
|
fieldname := fieldtype.Name
|
||||||
|
|
||||||
|
if fieldtype.IsExported() {
|
||||||
|
for k, v := range addMetaPrefix(fieldname, getReflectedMetaValues(rval.Field(i).Interface(), remainingDepth-1)) {
|
||||||
|
m[k] = v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
|
||||||
|
if rval.Type().ConvertibleTo(reflectTypeStr) {
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTString, Value: rval.Convert(reflectTypeStr).String()}}
|
||||||
|
}
|
||||||
|
|
||||||
|
jsonval, err := json.Marshal(value)
|
||||||
|
if err != nil {
|
||||||
|
panic(err) // gets recovered later up
|
||||||
|
}
|
||||||
|
|
||||||
|
return map[string]MetaValue{"": {DataType: MDTString, Value: string(jsonval)}}
|
||||||
|
}
|
||||||
|
|
||||||
|
func addMetaPrefix(prefix string, m map[string]MetaValue) map[string]MetaValue {
|
||||||
|
if len(m) == 1 {
|
||||||
|
for k, v := range m {
|
||||||
|
if k == "" {
|
||||||
|
return map[string]MetaValue{prefix: v}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
r := make(map[string]MetaValue, len(m))
|
||||||
|
for k, v := range m {
|
||||||
|
r[prefix+"."+k] = v
|
||||||
|
}
|
||||||
|
return r
|
||||||
|
}
|
||||||
exerr/data.go (new file, +95 lines)
@@ -0,0 +1,95 @@
package exerr

import (
	"gogs.mikescher.com/BlackForestBytes/goext/dataext"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

type ErrorCategory struct{ Category string }

var (
	CatWrap    = ErrorCategory{"Wrap"}    // The error is simply wrapping another error (e.g. when a grpc call returns an error)
	CatSystem  = ErrorCategory{"System"}  // An internal system error (e.g. connection to db failed)
	CatUser    = ErrorCategory{"User"}    // The user (the API caller) did something wrong (e.g. he has no permissions to do this)
	CatForeign = ErrorCategory{"Foreign"} // A foreign error that some component threw (e.g. an unknown mongodb error), happens if we call Wrap(..) on a non-bmerror value
)

//goland:noinspection GoUnusedGlobalVariable
var AllCategories = []ErrorCategory{CatWrap, CatSystem, CatUser, CatForeign}

type ErrorSeverity struct{ Severity string }

var (
	SevTrace = ErrorSeverity{"Trace"}
	SevDebug = ErrorSeverity{"Debug"}
	SevInfo  = ErrorSeverity{"Info"}
	SevWarn  = ErrorSeverity{"Warn"}
	SevErr   = ErrorSeverity{"Err"}
	SevFatal = ErrorSeverity{"Fatal"}
)

//goland:noinspection GoUnusedGlobalVariable
var AllSeverities = []ErrorSeverity{SevTrace, SevDebug, SevInfo, SevWarn, SevErr, SevFatal}

type ErrorType struct {
	Key               string
	DefaultStatusCode *int
}

//goland:noinspection GoUnusedGlobalVariable
var (
	TypeInternal       = NewType("INTERNAL_ERROR", langext.Ptr(500))
	TypePanic          = NewType("PANIC", langext.Ptr(500))
	TypeNotImplemented = NewType("NOT_IMPLEMENTED", langext.Ptr(500))

	TypeMongoQuery        = NewType("MONGO_QUERY", langext.Ptr(500))
	TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500))
	TypeMongoFilter       = NewType("MONGO_FILTER", langext.Ptr(500))
	TypeMongoReflection   = NewType("MONGO_REFLECTION", langext.Ptr(500))
	TypeMongoInvalidOpt   = NewType("MONGO_INVALIDOPT", langext.Ptr(500))

	TypeSQLQuery  = NewType("SQL_QUERY", langext.Ptr(500))
	TypeSQLBuild  = NewType("SQL_BUILD", langext.Ptr(500))
	TypeSQLDecode = NewType("SQL_DECODE", langext.Ptr(500))

	TypeWrap = NewType("Wrap", nil)

	TypeBindFailURI      = NewType("BINDFAIL_URI", langext.Ptr(400))
	TypeBindFailQuery    = NewType("BINDFAIL_QUERY", langext.Ptr(400))
	TypeBindFailJSON     = NewType("BINDFAIL_JSON", langext.Ptr(400))
	TypeBindFailFormData = NewType("BINDFAIL_FORMDATA", langext.Ptr(400))
	TypeBindFailHeader   = NewType("BINDFAIL_HEADER", langext.Ptr(400))

	TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400))
	TypeInvalidCSID     = NewType("INVALID_CSID", langext.Ptr(400))

	TypeGoogleStatuscode = NewType("GOOGLE_STATUSCODE", langext.Ptr(400))
	TypeGoogleResponse   = NewType("GOOGLE_RESPONSE", langext.Ptr(400))

	TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401))
	TypeAuthFailed   = NewType("AUTH_FAILED", langext.Ptr(401))

	// other values come from the downstream application that uses goext
)

var registeredTypes = dataext.SyncMap[string, ErrorType]{}

func NewType(key string, defStatusCode *int) ErrorType {
	et := ErrorType{key, defStatusCode}

	registeredTypes.Set(key, et)

	return et
}

func ListRegisteredTypes() []ErrorType {
	return registeredTypes.GetAllValues()
}

type LogPrintLevel string

const (
	LogPrintFull     LogPrintLevel = "Full"
	LogPrintOverview LogPrintLevel = "Overview"
	LogPrintShort    LogPrintLevel = "Short"
)
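As the comment in the var block notes, downstream applications extend the registry by declaring their own types with NewType, typically as package-level variables so registration happens at init time. A small sketch with invented type names and status codes:

```go
package apperrors

import (
	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

// Hypothetical application-specific error types; NewType stores them in the
// package-level registry as a side effect of this var block's initialization.
var (
	TypeUserNotFound  = exerr.NewType("USER_NOT_FOUND", langext.Ptr(404))
	TypeQuotaExceeded = exerr.NewType("QUOTA_EXCEEDED", langext.Ptr(429))
)
```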
exerr/errinit.go (new file, +88 lines)
@@ -0,0 +1,88 @@
package exerr

import (
	"fmt"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

type ErrorPackageConfig struct {
	ZeroLogErrTraces       bool                                             // automatically print zerolog logs on .Build() (for SevErr and SevFatal)
	ZeroLogAllTraces       bool                                             // automatically print zerolog logs on .Build() (for all Severities)
	RecursiveErrors        bool                                             // errors contain their Origin-Error
	ExtendedGinOutput      bool                                             // Log extended data (trace, meta, ...) to gin in err.Output()
	IncludeMetaInGinOutput bool                                             // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output()
	ExtendGinOutput        func(err *ExErr, json map[string]any)            // (Optionally) extend the gin output with more fields
	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) // (Optionally) extend the gin `__data` output with more fields
	DisableErrorWrapping   bool                                             // Disables the exerr.Wrap()...Build() function - will always return the original error
}

type ErrorPackageConfigInit struct {
	ZeroLogErrTraces       *bool
	ZeroLogAllTraces       *bool
	RecursiveErrors        *bool
	ExtendedGinOutput      *bool
	IncludeMetaInGinOutput *bool
	ExtendGinOutput        func(err *ExErr, json map[string]any)
	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any)
	DisableErrorWrapping   *bool
}

var initialized = false

var pkgconfig = ErrorPackageConfig{
	ZeroLogErrTraces:       true,
	ZeroLogAllTraces:       false,
	RecursiveErrors:        true,
	ExtendedGinOutput:      false,
	IncludeMetaInGinOutput: true,
	ExtendGinOutput:        func(err *ExErr, json map[string]any) {},
	ExtendGinDataOutput:    func(err *ExErr, depth int, json map[string]any) {},
	DisableErrorWrapping:   false,
}

// Init initializes the exerr package.
// It must be called at program start, before (!) any errors are created.
// It is not thread-safe.
func Init(cfg ErrorPackageConfigInit) {
	if initialized {
		panic("Cannot re-init error package")
	}

	ego := func(err *ExErr, json map[string]any) {}
	egdo := func(err *ExErr, depth int, json map[string]any) {}

	if cfg.ExtendGinOutput != nil {
		ego = cfg.ExtendGinOutput
	}
	if cfg.ExtendGinDataOutput != nil {
		egdo = cfg.ExtendGinDataOutput
	}

	pkgconfig = ErrorPackageConfig{
		ZeroLogErrTraces:       langext.Coalesce(cfg.ZeroLogErrTraces, pkgconfig.ZeroLogErrTraces),
		ZeroLogAllTraces:       langext.Coalesce(cfg.ZeroLogAllTraces, pkgconfig.ZeroLogAllTraces),
		RecursiveErrors:        langext.Coalesce(cfg.RecursiveErrors, pkgconfig.RecursiveErrors),
		ExtendedGinOutput:      langext.Coalesce(cfg.ExtendedGinOutput, pkgconfig.ExtendedGinOutput),
		IncludeMetaInGinOutput: langext.Coalesce(cfg.IncludeMetaInGinOutput, pkgconfig.IncludeMetaInGinOutput),
		ExtendGinOutput:        ego,
		ExtendGinDataOutput:    egdo,
		DisableErrorWrapping:   langext.Coalesce(cfg.DisableErrorWrapping, pkgconfig.DisableErrorWrapping),
	}

	initialized = true
}

func Initialized() bool {
	return initialized
}

func warnOnPkgConfigNotInitialized() {
	if !initialized {
		fmt.Printf("\n")
		fmt.Printf("%s\n", langext.StrRepeat("=", 80))
		fmt.Printf("%s\n", "[WARNING] exerr package used without initialization")
		fmt.Printf("%s\n", "          call exerr.Init() in your main() function")
		fmt.Printf("%s\n", langext.StrRepeat("=", 80))
		fmt.Printf("\n")
	}
}
exerr/exerr.go (new file, +383 lines)
@@ -0,0 +1,383 @@
|
|||||||
|
package exerr
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/rs/xid"
|
||||||
|
"github.com/rs/zerolog"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ExErr struct {
|
||||||
|
UniqueID string `json:"uniqueID"`
|
||||||
|
|
||||||
|
Timestamp time.Time `json:"timestamp"`
|
||||||
|
Category ErrorCategory `json:"category"`
|
||||||
|
Severity ErrorSeverity `json:"severity"`
|
||||||
|
Type ErrorType `json:"type"`
|
||||||
|
|
||||||
|
StatusCode *int `json:"statusCode"`
|
||||||
|
|
||||||
|
Message string `json:"message"`
|
||||||
|
WrappedErrType string `json:"wrappedErrType"`
|
||||||
|
WrappedErr any `json:"-"`
|
||||||
|
Caller string `json:"caller"`
|
||||||
|
|
||||||
|
OriginalError *ExErr `json:"originalError"`
|
||||||
|
|
||||||
|
Meta MetaMap `json:"meta"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ee *ExErr) Error() string {
|
||||||
|
return ee.RecursiveMessage()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unwrap must be implemented so that some error.XXX methods work
|
||||||
|
func (ee *ExErr) Unwrap() error {
|
||||||
|
if ee.OriginalError == nil {
|
||||||
|
return nil // this is necessary - otherwise we return a wrapped nil and the `x == nil` comparison fails (= panic in errors.Is and other failures)
|
||||||
|
}
|
||||||
|
return ee.OriginalError
|
||||||
|
}
|
||||||
|
|
||||||
|
// Is must be implemented so that error.Is(x) works
|
||||||
|
func (ee *ExErr) Is(e error) bool {
|
||||||
|
return IsFrom(ee, e)
|
||||||
|
}
|
||||||
|
|
||||||
|
// As must be implemented so that error.As(x) works
|
||||||
|
//
|
||||||
|
//goland:noinspection GoTypeAssertionOnErrors
|
||||||
|
func (ee *ExErr) As(target any) bool {
|
||||||
|
if dstErr, ok := target.(*ExErr); ok {
|
||||||
|
|
||||||
|
if dst0, ok := ee.contains(dstErr); ok {
|
||||||
|
dstErr = dst0
|
||||||
|
return true
|
||||||
|
} else {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
} else {
|
||||||
|
|
||||||
|
val := reflect.ValueOf(target)
|
||||||
|
|
||||||
|
typStr := val.Type().Elem().String()
|
||||||
|
|
||||||
|
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||||
|
if curr.Category == CatForeign && curr.WrappedErrType == typStr && curr.WrappedErr != nil {
|
||||||
|
val.Elem().Set(reflect.ValueOf(curr.WrappedErr))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ee *ExErr) Log(evt *zerolog.Event) {
|
||||||
|
evt.Msg(ee.FormatLog(LogPrintFull))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ee *ExErr) FormatLog(lvl LogPrintLevel) string {
|
||||||
|
if lvl == LogPrintShort {
|
||||||
|
|
||||||
|
msg := ee.Message
|
||||||
|
if ee.OriginalError != nil && ee.OriginalError.Category == CatForeign {
|
||||||
|
msg = msg + " (" + strings.ReplaceAll(ee.OriginalError.Message, "\n", " ") + ")"
|
||||||
|
}
|
||||||
|
|
||||||
|
if ee.Type != TypeWrap {
|
||||||
|
return "[" + ee.Type.Key + "] " + msg
|
||||||
|
} else {
|
||||||
|
return msg
|
||||||
|
}
|
||||||
|
|
||||||
|
} else if lvl == LogPrintOverview {
|
||||||
|
|
||||||
|
str := "[" + ee.RecursiveType().Key + "] <" + ee.UniqueID + "> " + strings.ReplaceAll(ee.RecursiveMessage(), "\n", " ") + "\n"
|
||||||
|
|
||||||
|
indent := ""
|
||||||
|
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||||
|
indent += " "
|
||||||
|
|
||||||
|
str += indent
|
||||||
|
str += "-> "
|
||||||
|
strmsg := strings.Trim(curr.Message, " \r\n\t")
|
||||||
|
if lbidx := strings.Index(curr.Message, "\n"); lbidx >= 0 {
|
||||||
|
strmsg = strmsg[0:lbidx]
|
||||||
|
}
|
||||||
|
strmsg = langext.StrLimit(strmsg, 61, "...")
|
||||||
|
str += strmsg
|
||||||
|
str += "\n"
|
||||||
|
|
||||||
|
}
|
||||||
|
return str
|
||||||
|
|
||||||
|
} else if lvl == LogPrintFull {
|
||||||
|
|
||||||
|
str := "[" + ee.RecursiveType().Key + "] <" + ee.UniqueID + "> " + strings.ReplaceAll(ee.RecursiveMessage(), "\n", " ") + "\n"
|
||||||
|
|
||||||
|
indent := ""
|
||||||
|
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||||
|
indent += " "
|
||||||
|
|
||||||
|
etype := ee.Type.Key
|
||||||
|
if ee.Type == TypeWrap {
|
||||||
|
etype = "~"
|
||||||
|
}
|
||||||
|
|
||||||
|
str += indent
|
||||||
|
str += "-> ["
|
||||||
|
str += etype
|
||||||
|
if curr.Category == CatForeign {
|
||||||
|
str += "|Foreign"
|
||||||
|
}
|
||||||
|
str += "] "
|
||||||
|
str += strings.ReplaceAll(curr.Message, "\n", " ")
|
||||||
|
if curr.Caller != "" {
|
||||||
|
str += " (@ "
|
||||||
|
str += curr.Caller
|
||||||
|
str += ")"
|
||||||
|
}
|
||||||
|
str += "\n"
|
||||||
|
|
||||||
|
if curr.Meta.Any() {
|
||||||
|
meta := indent + " {" + curr.Meta.FormatOneLine(240) + "}"
|
||||||
|
if len(meta) < 200 {
|
||||||
|
str += meta
|
||||||
|
str += "\n"
|
||||||
|
} else {
|
||||||
|
str += curr.Meta.FormatMultiLine(indent+" ", " ", 1024)
|
||||||
|
str += "\n"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return str
|
||||||
|
|
||||||
|
} else {
|
||||||
|
|
||||||
|
return "[?[" + ee.UniqueID + "]?]"
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ee *ExErr) ShortLog(evt *zerolog.Event) {
|
||||||
|
ee.Meta.Apply(evt, langext.Ptr(240)).Msg(ee.FormatLog(LogPrintShort))
|
||||||
|
}
|
||||||
|
|
||||||
|
// RecursiveMessage returns the message to show
|
||||||
|
// = first error (top-down) that is not wrapping/foreign/empty
|
||||||
|
// = lowest level error (that is not empty)
|
||||||
|
// = fallback to self.message
|
||||||
|
func (ee *ExErr) RecursiveMessage() string {
|
||||||
|
|
||||||
|
// ==== [1] ==== first error (top-down) that is not wrapping/foreign/empty
|
||||||
|
|
||||||
|
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||||
|
if curr.Message != "" && curr.Category != CatWrap && curr.Category != CatForeign {
|
||||||
|
return curr.Message
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ==== [2] ==== lowest level error (that is not empty)
|
||||||
|
|
||||||
|
deepestMsg := ""
|
||||||
|
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||||
|
if curr.Message != "" {
|
||||||
|
deepestMsg = curr.Message
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if deepestMsg != "" {
|
||||||
|
return deepestMsg
|
||||||
|
}
|
||||||
|
|
||||||
|
// ==== [3] ==== fallback to self.message
|
||||||
|
|
||||||
|
return ee.Message
|
||||||
|
}
|
||||||
|
|
||||||
|
// RecursiveType returns the statuscode to use
|
||||||
|
// = first error (top-down) that is not wrapping/empty
|
||||||
|
func (ee *ExErr) RecursiveType() ErrorType {
|
||||||
|
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||||
|
if curr.Type != TypeWrap {
|
||||||
|
return curr.Type
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// fallback to self
|
||||||
|
return ee.Type
|
||||||
|
}
|
||||||
|
|
||||||
|
// RecursiveStatuscode returns the HTTP Statuscode to use
|
||||||
|
// = first error (top-down) that has a statuscode set
|
||||||
|
func (ee *ExErr) RecursiveStatuscode() *int {
|
||||||
|
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||||
|
if curr.StatusCode != nil {
|
||||||
|
return langext.Ptr(*curr.StatusCode)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// RecursiveCategory returns the ErrorCategory to use
|
||||||
|
// = first error (top-down) that has a statuscode set
|
||||||
|
func (ee *ExErr) RecursiveCategory() ErrorCategory {
|
||||||
|
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||||
|
if curr.Category != CatWrap {
|
||||||
|
return curr.Category
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// fallback to <empty>
|
||||||
|
return ee.Category
|
||||||
|
}
|
||||||
|
|
||||||
|
// RecursiveMeta searches (top-down) for the first error that has a meta value with the specified key
// and returns its value (or nil)
|
||||||
|
func (ee *ExErr) RecursiveMeta(key string) *MetaValue {
|
||||||
|
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||||
|
if metaval, ok := curr.Meta[key]; ok {
|
||||||
|
return langext.Ptr(metaval)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Depth returns the depth of recursively contained errors
|
||||||
|
func (ee *ExErr) Depth() int {
|
||||||
|
if ee.OriginalError == nil {
|
||||||
|
return 1
|
||||||
|
} else {
|
||||||
|
return ee.OriginalError.Depth() + 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetMeta returns the meta value with the specified key
|
||||||
|
// this method recurses through all wrapped errors and returns the first matching meta value
|
||||||
|
func (ee *ExErr) GetMeta(key string) (any, bool) {
|
||||||
|
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||||
|
if v, ok := curr.Meta[key]; ok {
|
||||||
|
return v.Value, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetMetaString functions the same as GetMeta, but returns false if the type does not match
|
||||||
|
func (ee *ExErr) GetMetaString(key string) (string, bool) {
|
||||||
|
if v1, ok := ee.GetMeta(key); ok {
|
||||||
|
if v2, ok := v1.(string); ok {
|
||||||
|
return v2, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return "", false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ee *ExErr) GetMetaBool(key string) (bool, bool) {
|
||||||
|
if v1, ok := ee.GetMeta(key); ok {
|
||||||
|
if v2, ok := v1.(bool); ok {
|
||||||
|
return v2, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false, false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ee *ExErr) GetMetaInt(key string) (int, bool) {
|
||||||
|
if v1, ok := ee.GetMeta(key); ok {
|
||||||
|
if v2, ok := v1.(int); ok {
|
||||||
|
return v2, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0, false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ee *ExErr) GetMetaFloat32(key string) (float32, bool) {
|
||||||
|
if v1, ok := ee.GetMeta(key); ok {
|
||||||
|
if v2, ok := v1.(float32); ok {
|
||||||
|
return v2, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0, false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ee *ExErr) GetMetaFloat64(key string) (float64, bool) {
|
||||||
|
if v1, ok := ee.GetMeta(key); ok {
|
||||||
|
if v2, ok := v1.(float64); ok {
|
||||||
|
return v2, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0, false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ee *ExErr) GetMetaTime(key string) (time.Time, bool) {
|
||||||
|
if v1, ok := ee.GetMeta(key); ok {
|
||||||
|
if v2, ok := v1.(time.Time); ok {
|
||||||
|
return v2, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return time.Time{}, false
|
||||||
|
}
|
||||||
|
|
||||||
|
// contains test if the supplied error is contained in this error (anywhere in the chain)
|
||||||
|
func (ee *ExErr) contains(original *ExErr) (*ExErr, bool) {
|
||||||
|
if original == nil {
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
|
||||||
|
if ee == original {
|
||||||
|
return ee, true
|
||||||
|
}
|
||||||
|
|
||||||
|
for curr := ee; curr != nil; curr = curr.OriginalError {
|
||||||
|
if curr.equalsDirectProperties(original) {
|
||||||
|
return curr, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
|
||||||
|
// equalsDirectProperties tests if ee and other are equals, but only looks at primary properties (not `OriginalError` or `Meta`)
|
||||||
|
func (ee *ExErr) equalsDirectProperties(other *ExErr) bool {
|
||||||
|
|
||||||
|
if ee.UniqueID != other.UniqueID {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if ee.Timestamp != other.Timestamp {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if ee.Category != other.Category {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if ee.Severity != other.Severity {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if ee.Type != other.Type {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if ee.StatusCode != other.StatusCode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if ee.Message != other.Message {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if ee.WrappedErrType != other.WrappedErrType {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if ee.Caller != other.Caller {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func newID() string {
|
||||||
|
return xid.New().String()
|
||||||
|
}
|
||||||
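For orientation, a brief usage sketch for the meta accessors above, assuming the builder's Str(key, value) call (as used in the tests below) stores its value under the given meta key; the wrapped error, key, and value are illustrative only, and the snippet additionally needs the io and fmt imports:

	err := exerr.Wrap(io.ErrUnexpectedEOF, "upload failed").Str("uploadId", "4711").Build()

	if ee := exerr.FromError(err); ee != nil {
		if uploadID, ok := ee.GetMetaString("uploadId"); ok {
			fmt.Println("failed upload:", uploadID) // -> "failed upload: 4711"
		}
	}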
93 exerr/exerr_test.go Normal file
@@ -0,0 +1,93 @@
package exerr

import (
	"errors"
	"gogs.mikescher.com/BlackForestBytes/goext/tst"
	"testing"
)

type golangErr struct {
	Message string
}

func (g golangErr) Error() string {
	return g.Message
}

type golangErr2 struct {
	Message string
}

func (g golangErr2) Error() string {
	return g.Message
}

type simpleError struct {
}

func (g simpleError) Error() string {
	return "Something simple went wrong"
}

type simpleError2 struct {
}

func (g simpleError2) Error() string {
	return "Something simple went wrong"
}

func TestExErrIs1(t *testing.T) {
	e0 := simpleError{}

	wrap := Wrap(e0, "something went wrong").Str("test", "123").Build()

	tst.AssertTrue(t, errors.Is(wrap, simpleError{}))
	tst.AssertFalse(t, errors.Is(wrap, golangErr{}))
	tst.AssertFalse(t, errors.Is(wrap, golangErr{"error1"}))
}

func TestExErrIs2(t *testing.T) {
	e0 := golangErr{"error1"}

	wrap := Wrap(e0, "something went wrong").Str("test", "123").Build()

	tst.AssertTrue(t, errors.Is(wrap, e0))
	tst.AssertTrue(t, errors.Is(wrap, golangErr{"error1"}))
	tst.AssertFalse(t, errors.Is(wrap, golangErr{"error2"}))
	tst.AssertFalse(t, errors.Is(wrap, simpleError{}))
}

func TestExErrAs(t *testing.T) {

	e0 := golangErr{"error1"}

	w0 := Wrap(e0, "something went wrong").Str("test", "123").Build()

	{
		out := golangErr{}
		ok := errors.As(w0, &out)
		tst.AssertTrue(t, ok)
		tst.AssertEqual(t, out.Message, "error1")
	}

	w1 := Wrap(w0, "outer error").Build()

	{
		out := golangErr{}
		ok := errors.As(w1, &out)
		tst.AssertTrue(t, ok)
		tst.AssertEqual(t, out.Message, "error1")
	}

	{
		out := golangErr2{}
		ok := errors.As(w1, &out)
		tst.AssertFalse(t, ok)
	}

	{
		out := simpleError2{}
		ok := errors.As(w1, &out)
		tst.AssertFalse(t, ok)
	}
}
125 exerr/gin.go Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
package exerr
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) langext.H {
|
||||||
|
ginJson := langext.H{}
|
||||||
|
|
||||||
|
if ee.UniqueID != "" {
|
||||||
|
ginJson["id"] = ee.UniqueID
|
||||||
|
}
|
||||||
|
if ee.Category != CatWrap {
|
||||||
|
ginJson["category"] = ee.Category
|
||||||
|
}
|
||||||
|
if ee.Type != TypeWrap {
|
||||||
|
ginJson["type"] = ee.Type
|
||||||
|
}
|
||||||
|
if ee.StatusCode != nil {
|
||||||
|
ginJson["statuscode"] = ee.StatusCode
|
||||||
|
}
|
||||||
|
if ee.Message != "" {
|
||||||
|
ginJson["message"] = ee.Message
|
||||||
|
}
|
||||||
|
if ee.Caller != "" {
|
||||||
|
ginJson["caller"] = ee.Caller
|
||||||
|
}
|
||||||
|
if ee.Severity != SevErr {
|
||||||
|
ginJson["severity"] = ee.Severity
|
||||||
|
}
|
||||||
|
if ee.Timestamp != (time.Time{}) {
|
||||||
|
ginJson["time"] = ee.Timestamp.Format(time.RFC3339)
|
||||||
|
}
|
||||||
|
if ee.WrappedErrType != "" {
|
||||||
|
ginJson["wrappedErrType"] = ee.WrappedErrType
|
||||||
|
}
|
||||||
|
if ee.OriginalError != nil {
|
||||||
|
ginJson["original"] = ee.OriginalError.toJson(depth+1, applyExtendListener, outputMeta)
|
||||||
|
}
|
||||||
|
|
||||||
|
if outputMeta {
|
||||||
|
metaJson := langext.H{}
|
||||||
|
for metaKey, metaVal := range ee.Meta {
|
||||||
|
metaJson[metaKey] = metaVal.rawValueForJson()
|
||||||
|
}
|
||||||
|
ginJson["meta"] = metaJson
|
||||||
|
}
|
||||||
|
|
||||||
|
if applyExtendListener {
|
||||||
|
pkgconfig.ExtendGinDataOutput(ee, depth, ginJson)
|
||||||
|
}
|
||||||
|
|
||||||
|
return ginJson
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ee *ExErr) ToDefaultAPIJson() (string, error) {
|
||||||
|
|
||||||
|
gjr := json.GoJsonRender{Data: ee.ToAPIJson(true, pkgconfig.ExtendedGinOutput, pkgconfig.IncludeMetaInGinOutput), NilSafeSlices: true, NilSafeMaps: true}
|
||||||
|
|
||||||
|
r, err := gjr.RenderString()
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
return r, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToAPIJson converts the ExError to a json object
|
||||||
|
// (the same object as used in the Output(gin) method)
|
||||||
|
//
|
||||||
|
// Parameters:
|
||||||
|
// - [applyExtendListener]: if false the pkgconfig.ExtendGinOutput / pkgconfig.ExtendGinDataOutput will not be applied
|
||||||
|
// - [includeWrappedErrors]: if false we do not include the recursive/wrapped errors in `__data`
|
||||||
|
// - [includeMetaFields]: if true we also include meta-values (aka from `.Str(key, value).Build()`), needs includeWrappedErrors=true
|
||||||
|
func (ee *ExErr) ToAPIJson(applyExtendListener bool, includeWrappedErrors bool, includeMetaFields bool) langext.H {
|
||||||
|
|
||||||
|
apiOutput := langext.H{
|
||||||
|
"errorid": ee.UniqueID,
|
||||||
|
"message": ee.RecursiveMessage(),
|
||||||
|
"errorcode": ee.RecursiveType().Key,
|
||||||
|
"category": ee.RecursiveCategory().Category,
|
||||||
|
}
|
||||||
|
|
||||||
|
if includeWrappedErrors {
|
||||||
|
apiOutput["__data"] = ee.toJson(0, applyExtendListener, includeMetaFields)
|
||||||
|
}
|
||||||
|
|
||||||
|
if applyExtendListener {
|
||||||
|
pkgconfig.ExtendGinOutput(ee, apiOutput)
|
||||||
|
}
|
||||||
|
|
||||||
|
return apiOutput
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ee *ExErr) Output(g *gin.Context) {
|
||||||
|
|
||||||
|
warnOnPkgConfigNotInitialized()
|
||||||
|
|
||||||
|
var statuscode = http.StatusInternalServerError
|
||||||
|
|
||||||
|
var baseCat = ee.RecursiveCategory()
|
||||||
|
var baseType = ee.RecursiveType()
|
||||||
|
var baseStatuscode = ee.RecursiveStatuscode()
|
||||||
|
|
||||||
|
if baseCat == CatUser {
|
||||||
|
statuscode = http.StatusBadRequest
|
||||||
|
} else if baseCat == CatSystem {
|
||||||
|
statuscode = http.StatusInternalServerError
|
||||||
|
}
|
||||||
|
|
||||||
|
if baseStatuscode != nil {
|
||||||
|
statuscode = *ee.StatusCode
|
||||||
|
} else if baseType.DefaultStatusCode != nil {
|
||||||
|
statuscode = *baseType.DefaultStatusCode
|
||||||
|
}
|
||||||
|
|
||||||
|
ginOutput := ee.ToAPIJson(true, pkgconfig.ExtendedGinOutput, pkgconfig.IncludeMetaInGinOutput)
|
||||||
|
|
||||||
|
g.Render(statuscode, json.GoJsonRender{Data: ginOutput, NilSafeSlices: true, NilSafeMaps: true})
|
||||||
|
}
|
||||||
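A hedged sketch of how Output is typically called from a gin handler. loadUser and the uid parameter are hypothetical stand-ins; only Wrap, Str, Build, FromError, and Output are taken from this package:

	func getUserHandler(g *gin.Context) {
		user, err := loadUser(g.Param("uid")) // hypothetical application function
		if err != nil {
			ee := exerr.FromError(exerr.Wrap(err, "failed to load user").Str("uid", g.Param("uid")).Build())
			ee.Output(g) // writes the JSON error document and derives the status code from category/type
			return
		}
		g.JSON(http.StatusOK, user)
	}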
88 exerr/helper.go Normal file
@@ -0,0 +1,88 @@
package exerr

import "fmt"

// IsType tests if the supplied error is of the specified ErrorType.
func IsType(err error, errType ErrorType) bool {
	if err == nil {
		return false
	}

	bmerr := FromError(err)
	for bmerr != nil {
		if bmerr.Type == errType {
			return true
		}
		bmerr = bmerr.OriginalError
	}

	return false
}

// IsFrom tests if the supplied error stems originally from original
func IsFrom(e error, original error) bool {
	if e == nil {
		return false
	}

	//goland:noinspection GoDirectComparisonOfErrors
	if e == original {
		return true
	}

	bmerr := FromError(e)
	if bmerr == nil {
		return false
	}

	for curr := bmerr; curr != nil; curr = curr.OriginalError {
		if curr.Category == CatForeign && curr.Message == original.Error() && curr.WrappedErrType == fmt.Sprintf("%T", original) {
			return true
		}
	}

	return false
}

// HasSourceMessage tests if the supplied error stems originally from an error with the message msg
func HasSourceMessage(e error, msg string) bool {
	if e == nil {
		return false
	}

	bmerr := FromError(e)
	if bmerr == nil {
		return false
	}

	for curr := bmerr; curr != nil; curr = curr.OriginalError {
		if curr.OriginalError == nil && curr.Message == msg {
			return true
		}
	}

	return false
}

// MessageMatch tests if the error (or any error in its wrapped chain) has a message that satisfies the matcher.
func MessageMatch(e error, matcher func(string) bool) bool {
	if e == nil {
		return false
	}

	if matcher(e.Error()) {
		return true
	}

	bmerr := FromError(e)
	if bmerr == nil {
		return false
	}

	for curr := bmerr; curr != nil; curr = curr.OriginalError {
		if matcher(curr.Message) {
			return true
		}
	}

	return false
}
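A short sketch of the helpers above. doQuery is a hypothetical application function, sql.ErrNoRows is the stand-in sentinel error, and TypeNotFound is an assumed application-defined ErrorType, not necessarily one shipped by this package:

	err := doQuery() // hypothetical; may wrap sql.ErrNoRows somewhere down the chain

	if exerr.IsFrom(err, sql.ErrNoRows) {
		// the chain bottoms out in sql.ErrNoRows
	}
	if exerr.IsType(err, TypeNotFound) { // TypeNotFound: assumed ErrorType registered by the application
		// some layer of the chain was tagged with that type
	}
	if exerr.HasSourceMessage(err, "connection refused") {
		// the innermost message matches exactly
	}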
35 exerr/listener.go Normal file
@@ -0,0 +1,35 @@
package exerr

import (
	"sync"
)

type Method string

const (
	MethodOutput Method = "OUTPUT"
	MethodPrint  Method = "PRINT"
	MethodBuild  Method = "BUILD"
	MethodFatal  Method = "FATAL"
)

type Listener = func(method Method, v *ExErr)

var listenerLock = sync.Mutex{}
var listener = make([]Listener, 0)

func RegisterListener(l Listener) {
	listenerLock.Lock()
	defer listenerLock.Unlock()

	listener = append(listener, l)
}

func (ee *ExErr) CallListener(m Method) {
	listenerLock.Lock()
	defer listenerLock.Unlock()

	for _, v := range listener {
		v(m, ee)
	}
}
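A minimal sketch of registering a listener at application startup so that built or fatally-logged errors are mirrored to a log sink; the zerolog call is an assumption, any sink works:

	exerr.RegisterListener(func(method exerr.Method, v *exerr.ExErr) {
		if method == exerr.MethodBuild || method == exerr.MethodFatal {
			log.Warn().Str("errorid", v.UniqueID).Str("method", string(method)).Msg(v.Message)
		}
	})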
759 exerr/meta.go Normal file
@@ -0,0 +1,759 @@
|
|||||||
|
package exerr
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/hex"
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"github.com/rs/zerolog"
|
||||||
|
"go.mongodb.org/mongo-driver/bson"
|
||||||
|
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||||
|
"math"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// This is a beefed-up map[string]any:
// we also save the type information of the map values,
// which allows us to deserialize them back into the correct types later.
|
||||||
|
|
||||||
|
type MetaMap map[string]MetaValue
|
||||||
|
|
||||||
|
type metaDataType string
|
||||||
|
|
||||||
|
const (
|
||||||
|
MDTString metaDataType = "String"
|
||||||
|
MDTStringPtr metaDataType = "StringPtr"
|
||||||
|
MDTInt metaDataType = "Int"
|
||||||
|
MDTInt8 metaDataType = "Int8"
|
||||||
|
MDTInt16 metaDataType = "Int16"
|
||||||
|
MDTInt32 metaDataType = "Int32"
|
||||||
|
MDTInt64 metaDataType = "Int64"
|
||||||
|
MDTFloat32 metaDataType = "Float32"
|
||||||
|
MDTFloat64 metaDataType = "Float64"
|
||||||
|
MDTBool metaDataType = "Bool"
|
||||||
|
MDTBytes metaDataType = "Bytes"
|
||||||
|
MDTObjectID metaDataType = "ObjectID"
|
||||||
|
MDTTime metaDataType = "Time"
|
||||||
|
MDTDuration metaDataType = "Duration"
|
||||||
|
MDTStringArray metaDataType = "StringArr"
|
||||||
|
MDTIntArray metaDataType = "IntArr"
|
||||||
|
MDTInt32Array metaDataType = "Int32Arr"
|
||||||
|
MDTID metaDataType = "ID"
|
||||||
|
MDTAny metaDataType = "Interface"
|
||||||
|
MDTNil metaDataType = "Nil"
|
||||||
|
MDTEnum metaDataType = "Enum"
|
||||||
|
)
|
||||||
|
|
||||||
|
type MetaValue struct {
|
||||||
|
DataType metaDataType `json:"dataType"`
|
||||||
|
Value interface{} `json:"value"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type metaValueSerialization struct {
|
||||||
|
DataType metaDataType `bson:"dataType"`
|
||||||
|
Value string `bson:"value"`
|
||||||
|
Raw interface{} `bson:"raw"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v MetaValue) SerializeValue() (string, error) {
|
||||||
|
switch v.DataType {
|
||||||
|
case MDTString:
|
||||||
|
return v.Value.(string), nil
|
||||||
|
case MDTID:
|
||||||
|
return v.Value.(IDWrap).Serialize(), nil
|
||||||
|
case MDTAny:
|
||||||
|
return v.Value.(AnyWrap).Serialize(), nil
|
||||||
|
case MDTStringPtr:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return "#", nil
|
||||||
|
}
|
||||||
|
r := v.Value.(*string)
|
||||||
|
if r != nil {
|
||||||
|
return "*" + *r, nil
|
||||||
|
} else {
|
||||||
|
return "#", nil
|
||||||
|
}
|
||||||
|
case MDTInt:
|
||||||
|
return strconv.Itoa(v.Value.(int)), nil
|
||||||
|
case MDTInt8:
|
||||||
|
return strconv.FormatInt(int64(v.Value.(int8)), 10), nil
|
||||||
|
case MDTInt16:
|
||||||
|
return strconv.FormatInt(int64(v.Value.(int16)), 10), nil
|
||||||
|
case MDTInt32:
|
||||||
|
return strconv.FormatInt(int64(v.Value.(int32)), 10), nil
|
||||||
|
case MDTInt64:
|
||||||
|
return strconv.FormatInt(v.Value.(int64), 10), nil
|
||||||
|
case MDTFloat32:
|
||||||
|
return strconv.FormatFloat(float64(v.Value.(float32)), 'X', -1, 32), nil
|
||||||
|
case MDTFloat64:
|
||||||
|
return strconv.FormatFloat(v.Value.(float64), 'X', -1, 64), nil
|
||||||
|
case MDTBool:
|
||||||
|
if v.Value.(bool) {
|
||||||
|
return "true", nil
|
||||||
|
} else {
|
||||||
|
return "false", nil
|
||||||
|
}
|
||||||
|
case MDTBytes:
|
||||||
|
return hex.EncodeToString(v.Value.([]byte)), nil
|
||||||
|
case MDTObjectID:
|
||||||
|
return v.Value.(primitive.ObjectID).Hex(), nil
|
||||||
|
case MDTTime:
|
||||||
|
return strconv.FormatInt(v.Value.(time.Time).Unix(), 10) + "|" + strconv.FormatInt(int64(v.Value.(time.Time).Nanosecond()), 10), nil
|
||||||
|
case MDTDuration:
|
||||||
|
return v.Value.(time.Duration).String(), nil
|
||||||
|
case MDTStringArray:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return "#", nil
|
||||||
|
}
|
||||||
|
r, err := json.Marshal(v.Value.([]string))
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return string(r), nil
|
||||||
|
case MDTIntArray:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return "#", nil
|
||||||
|
}
|
||||||
|
r, err := json.Marshal(v.Value.([]int))
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return string(r), nil
|
||||||
|
case MDTInt32Array:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return "#", nil
|
||||||
|
}
|
||||||
|
r, err := json.Marshal(v.Value.([]int32))
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return string(r), nil
|
||||||
|
case MDTNil:
|
||||||
|
return "", nil
|
||||||
|
case MDTEnum:
|
||||||
|
return v.Value.(EnumWrap).Serialize(), nil
|
||||||
|
}
|
||||||
|
return "", errors.New("Unknown type: " + string(v.DataType))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v MetaValue) ShortString(lim int) string {
|
||||||
|
switch v.DataType {
|
||||||
|
case MDTString:
|
||||||
|
r := strings.ReplaceAll(v.Value.(string), "\r", "")
|
||||||
|
r = strings.ReplaceAll(r, "\n", "\\n")
|
||||||
|
r = strings.ReplaceAll(r, "\t", "\\t")
|
||||||
|
return langext.StrLimit(r, lim, "...")
|
||||||
|
case MDTID:
|
||||||
|
return v.Value.(IDWrap).String()
|
||||||
|
case MDTAny:
|
||||||
|
return v.Value.(AnyWrap).String()
|
||||||
|
case MDTStringPtr:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return "<<null>>"
|
||||||
|
}
|
||||||
|
r := langext.CoalesceString(v.Value.(*string), "<<null>>")
|
||||||
|
r = strings.ReplaceAll(r, "\r", "")
|
||||||
|
r = strings.ReplaceAll(r, "\n", "\\n")
|
||||||
|
r = strings.ReplaceAll(r, "\t", "\\t")
|
||||||
|
return langext.StrLimit(r, lim, "...")
|
||||||
|
case MDTInt:
|
||||||
|
return strconv.Itoa(v.Value.(int))
|
||||||
|
case MDTInt8:
|
||||||
|
return strconv.FormatInt(int64(v.Value.(int8)), 10)
|
||||||
|
case MDTInt16:
|
||||||
|
return strconv.FormatInt(int64(v.Value.(int16)), 10)
|
||||||
|
case MDTInt32:
|
||||||
|
return strconv.FormatInt(int64(v.Value.(int32)), 10)
|
||||||
|
case MDTInt64:
|
||||||
|
return strconv.FormatInt(v.Value.(int64), 10)
|
||||||
|
case MDTFloat32:
|
||||||
|
return strconv.FormatFloat(float64(v.Value.(float32)), 'g', 4, 32)
|
||||||
|
case MDTFloat64:
|
||||||
|
return strconv.FormatFloat(v.Value.(float64), 'g', 4, 64)
|
||||||
|
case MDTBool:
|
||||||
|
return fmt.Sprintf("%v", v.Value.(bool))
|
||||||
|
case MDTBytes:
|
||||||
|
return langext.StrLimit(hex.EncodeToString(v.Value.([]byte)), lim, "...")
|
||||||
|
case MDTObjectID:
|
||||||
|
return v.Value.(primitive.ObjectID).Hex()
|
||||||
|
case MDTTime:
|
||||||
|
return v.Value.(time.Time).Format(time.RFC3339)
|
||||||
|
case MDTDuration:
|
||||||
|
return v.Value.(time.Duration).String()
|
||||||
|
case MDTStringArray:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return "<<null>>"
|
||||||
|
}
|
||||||
|
r, err := json.Marshal(v.Value.([]string))
|
||||||
|
if err != nil {
|
||||||
|
return "(err)"
|
||||||
|
}
|
||||||
|
return langext.StrLimit(string(r), lim, "...")
|
||||||
|
case MDTIntArray:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return "<<null>>"
|
||||||
|
}
|
||||||
|
r, err := json.Marshal(v.Value.([]int))
|
||||||
|
if err != nil {
|
||||||
|
return "(err)"
|
||||||
|
}
|
||||||
|
return langext.StrLimit(string(r), lim, "...")
|
||||||
|
case MDTInt32Array:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return "<<null>>"
|
||||||
|
}
|
||||||
|
r, err := json.Marshal(v.Value.([]int32))
|
||||||
|
if err != nil {
|
||||||
|
return "(err)"
|
||||||
|
}
|
||||||
|
return langext.StrLimit(string(r), lim, "...")
|
||||||
|
case MDTNil:
|
||||||
|
return "<<null>>"
|
||||||
|
case MDTEnum:
|
||||||
|
return v.Value.(EnumWrap).String()
|
||||||
|
}
|
||||||
|
return "(err)"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v MetaValue) Apply(key string, evt *zerolog.Event, limitLen *int) *zerolog.Event {
|
||||||
|
switch v.DataType {
|
||||||
|
case MDTString:
|
||||||
|
if limitLen == nil {
|
||||||
|
return evt.Str(key, v.Value.(string))
|
||||||
|
} else {
|
||||||
|
return evt.Str(key, langext.StrLimit(v.Value.(string), *limitLen, "..."))
|
||||||
|
}
|
||||||
|
case MDTID:
|
||||||
|
return evt.Str(key, v.Value.(IDWrap).Value)
|
||||||
|
case MDTAny:
|
||||||
|
if v.Value.(AnyWrap).IsError {
|
||||||
|
return evt.Str(key, "(err)")
|
||||||
|
} else {
|
||||||
|
if limitLen == nil {
|
||||||
|
return evt.Str(key, v.Value.(AnyWrap).Json)
|
||||||
|
} else {
|
||||||
|
return evt.Str(key, langext.StrLimit(v.Value.(AnyWrap).Json, *limitLen, "..."))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case MDTStringPtr:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return evt.Str(key, "<<null>>")
|
||||||
|
}
|
||||||
|
if limitLen == nil {
|
||||||
|
return evt.Str(key, langext.CoalesceString(v.Value.(*string), "<<null>>"))
|
||||||
|
} else {
|
||||||
|
return evt.Str(key, langext.StrLimit(langext.CoalesceString(v.Value.(*string), "<<null>>"), *limitLen, "..."))
|
||||||
|
}
|
||||||
|
case MDTInt:
|
||||||
|
return evt.Int(key, v.Value.(int))
|
||||||
|
case MDTInt8:
|
||||||
|
return evt.Int8(key, v.Value.(int8))
|
||||||
|
case MDTInt16:
|
||||||
|
return evt.Int16(key, v.Value.(int16))
|
||||||
|
case MDTInt32:
|
||||||
|
return evt.Int32(key, v.Value.(int32))
|
||||||
|
case MDTInt64:
|
||||||
|
return evt.Int64(key, v.Value.(int64))
|
||||||
|
case MDTFloat32:
|
||||||
|
return evt.Float32(key, v.Value.(float32))
|
||||||
|
case MDTFloat64:
|
||||||
|
return evt.Float64(key, v.Value.(float64))
|
||||||
|
case MDTBool:
|
||||||
|
return evt.Bool(key, v.Value.(bool))
|
||||||
|
case MDTBytes:
|
||||||
|
return evt.Bytes(key, v.Value.([]byte))
|
||||||
|
case MDTObjectID:
|
||||||
|
return evt.Str(key, v.Value.(primitive.ObjectID).Hex())
|
||||||
|
case MDTTime:
|
||||||
|
return evt.Time(key, v.Value.(time.Time))
|
||||||
|
case MDTDuration:
|
||||||
|
return evt.Dur(key, v.Value.(time.Duration))
|
||||||
|
case MDTStringArray:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return evt.Strs(key, nil)
|
||||||
|
}
|
||||||
|
return evt.Strs(key, v.Value.([]string))
|
||||||
|
case MDTIntArray:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return evt.Ints(key, nil)
|
||||||
|
}
|
||||||
|
return evt.Ints(key, v.Value.([]int))
|
||||||
|
case MDTInt32Array:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return evt.Ints32(key, nil)
|
||||||
|
}
|
||||||
|
return evt.Ints32(key, v.Value.([]int32))
|
||||||
|
case MDTNil:
|
||||||
|
return evt.Str(key, "<<null>>")
|
||||||
|
case MDTEnum:
|
||||||
|
if v.Value.(EnumWrap).IsNil {
|
||||||
|
return evt.Any(key, nil)
|
||||||
|
} else if v.Value.(EnumWrap).ValueRaw != nil {
|
||||||
|
return evt.Any(key, v.Value.(EnumWrap).ValueRaw)
|
||||||
|
} else {
|
||||||
|
return evt.Str(key, v.Value.(EnumWrap).ValueString)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return evt.Str(key, "(err)")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v MetaValue) MarshalJSON() ([]byte, error) {
|
||||||
|
str, err := v.SerializeValue()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return json.Marshal(string(v.DataType) + ":" + str)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *MetaValue) UnmarshalJSON(data []byte) error {
|
||||||
|
var str = ""
|
||||||
|
err := json.Unmarshal(data, &str)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
split := strings.SplitN(str, ":", 2)
|
||||||
|
if len(split) != 2 {
|
||||||
|
return errors.New("failed to decode MetaValue: '" + str + "'")
|
||||||
|
}
|
||||||
|
|
||||||
|
return v.Deserialize(split[1], metaDataType(split[0]))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v MetaValue) MarshalBSON() ([]byte, error) {
|
||||||
|
serval, err := v.SerializeValue()
|
||||||
|
if err != nil {
|
||||||
|
return nil, Wrap(err, "failed to bson-marshal MetaValue (serialize)").Build()
|
||||||
|
}
|
||||||
|
|
||||||
|
// this is a kinda ugly hack - but serialization to mongodb and back can lose the correct type information....
|
||||||
|
bin, err := bson.Marshal(metaValueSerialization{
|
||||||
|
DataType: v.DataType,
|
||||||
|
Value: serval,
|
||||||
|
Raw: v.Value,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return nil, Wrap(err, "failed to bson-marshal MetaValue (marshal)").Build()
|
||||||
|
}
|
||||||
|
|
||||||
|
return bin, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *MetaValue) UnmarshalBSON(bytes []byte) error {
|
||||||
|
var serval metaValueSerialization
|
||||||
|
err := bson.Unmarshal(bytes, &serval)
|
||||||
|
if err != nil {
|
||||||
|
return Wrap(err, "failed to bson-unmarshal MetaValue (unmarshal)").Build()
|
||||||
|
}
|
||||||
|
|
||||||
|
err = v.Deserialize(serval.Value, serval.DataType)
|
||||||
|
if err != nil {
|
||||||
|
return Wrap(err, "failed to deserialize MetaValue from bson").Str("raw", serval.Value).Build()
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v *MetaValue) Deserialize(value string, datatype metaDataType) error {
|
||||||
|
switch datatype {
|
||||||
|
case MDTString:
|
||||||
|
v.Value = value
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTID:
|
||||||
|
v.Value = deserializeIDWrap(value)
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTAny:
|
||||||
|
v.Value = deserializeAnyWrap(value)
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTStringPtr:
|
||||||
|
if len(value) <= 0 || (value[0] != '*' && value[0] != '#') {
|
||||||
|
return errors.New("Invalid StringPtr: " + value)
|
||||||
|
} else if value == "#" {
|
||||||
|
v.Value = nil
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
} else {
|
||||||
|
v.Value = langext.Ptr(value[1:])
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
case MDTInt:
|
||||||
|
pv, err := strconv.ParseInt(value, 10, 0)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v.Value = int(pv)
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTInt8:
|
||||||
|
pv, err := strconv.ParseInt(value, 10, 8)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v.Value = int8(pv)
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTInt16:
|
||||||
|
pv, err := strconv.ParseInt(value, 10, 16)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v.Value = int16(pv)
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTInt32:
|
||||||
|
pv, err := strconv.ParseInt(value, 10, 32)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v.Value = int32(pv)
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTInt64:
|
||||||
|
pv, err := strconv.ParseInt(value, 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v.Value = pv
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTFloat32:
|
||||||
|
pv, err := strconv.ParseFloat(value, 64)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v.Value = float32(pv)
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTFloat64:
|
||||||
|
pv, err := strconv.ParseFloat(value, 64)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v.Value = pv
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTBool:
|
||||||
|
if value == "true" {
|
||||||
|
v.Value = true
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if value == "false" {
|
||||||
|
v.Value = false
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return errors.New("invalid bool value: " + value)
|
||||||
|
case MDTBytes:
|
||||||
|
r, err := hex.DecodeString(value)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v.Value = r
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTObjectID:
|
||||||
|
r, err := primitive.ObjectIDFromHex(value)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v.Value = r
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTTime:
|
||||||
|
ps := strings.Split(value, "|")
|
||||||
|
if len(ps) != 2 {
|
||||||
|
return errors.New("invalid time.time: " + value)
|
||||||
|
}
|
||||||
|
p1, err := strconv.ParseInt(ps[0], 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
p2, err := strconv.ParseInt(ps[1], 10, 32)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v.Value = time.Unix(p1, p2)
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTDuration:
|
||||||
|
r, err := time.ParseDuration(value)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v.Value = r
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTStringArray:
|
||||||
|
if value == "#" {
|
||||||
|
v.Value = nil
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
pj := make([]string, 0)
|
||||||
|
err := json.Unmarshal([]byte(value), &pj)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v.Value = pj
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTIntArray:
|
||||||
|
if value == "#" {
|
||||||
|
v.Value = nil
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
pj := make([]int, 0)
|
||||||
|
err := json.Unmarshal([]byte(value), &pj)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v.Value = pj
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTInt32Array:
|
||||||
|
if value == "#" {
|
||||||
|
v.Value = nil
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
pj := make([]int32, 0)
|
||||||
|
err := json.Unmarshal([]byte(value), &pj)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v.Value = pj
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTNil:
|
||||||
|
v.Value = nil
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
case MDTEnum:
|
||||||
|
v.Value = deserializeEnumWrap(value)
|
||||||
|
v.DataType = datatype
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return errors.New("Unknown type: " + string(datatype))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (v MetaValue) ValueString() string {
|
||||||
|
switch v.DataType {
|
||||||
|
case MDTString:
|
||||||
|
return v.Value.(string)
|
||||||
|
case MDTID:
|
||||||
|
return v.Value.(IDWrap).String()
|
||||||
|
case MDTAny:
|
||||||
|
return v.Value.(AnyWrap).String()
|
||||||
|
case MDTStringPtr:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return "<<null>>"
|
||||||
|
}
|
||||||
|
return langext.CoalesceString(v.Value.(*string), "<<null>>")
|
||||||
|
case MDTInt:
|
||||||
|
return strconv.Itoa(v.Value.(int))
|
||||||
|
case MDTInt8:
|
||||||
|
return strconv.FormatInt(int64(v.Value.(int8)), 10)
|
||||||
|
case MDTInt16:
|
||||||
|
return strconv.FormatInt(int64(v.Value.(int16)), 10)
|
||||||
|
case MDTInt32:
|
||||||
|
return strconv.FormatInt(int64(v.Value.(int32)), 10)
|
||||||
|
case MDTInt64:
|
||||||
|
return strconv.FormatInt(v.Value.(int64), 10)
|
||||||
|
case MDTFloat32:
|
||||||
|
return strconv.FormatFloat(float64(v.Value.(float32)), 'g', 4, 32)
|
||||||
|
case MDTFloat64:
|
||||||
|
return strconv.FormatFloat(v.Value.(float64), 'g', 4, 64)
|
||||||
|
case MDTBool:
|
||||||
|
return fmt.Sprintf("%v", v.Value.(bool))
|
||||||
|
case MDTBytes:
|
||||||
|
return hex.EncodeToString(v.Value.([]byte))
|
||||||
|
case MDTObjectID:
|
||||||
|
return v.Value.(primitive.ObjectID).Hex()
|
||||||
|
case MDTTime:
|
||||||
|
return v.Value.(time.Time).Format(time.RFC3339Nano)
|
||||||
|
case MDTDuration:
|
||||||
|
return v.Value.(time.Duration).String()
|
||||||
|
case MDTStringArray:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return "<<null>>"
|
||||||
|
}
|
||||||
|
r, err := json.MarshalIndent(v.Value.([]string), "", " ")
|
||||||
|
if err != nil {
|
||||||
|
return "(err)"
|
||||||
|
}
|
||||||
|
return string(r)
|
||||||
|
case MDTIntArray:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return "<<null>>"
|
||||||
|
}
|
||||||
|
r, err := json.MarshalIndent(v.Value.([]int), "", " ")
|
||||||
|
if err != nil {
|
||||||
|
return "(err)"
|
||||||
|
}
|
||||||
|
return string(r)
|
||||||
|
case MDTInt32Array:
|
||||||
|
if langext.IsNil(v.Value) {
|
||||||
|
return "<<null>>"
|
||||||
|
}
|
||||||
|
r, err := json.MarshalIndent(v.Value.([]int32), "", " ")
|
||||||
|
if err != nil {
|
||||||
|
return "(err)"
|
||||||
|
}
|
||||||
|
return string(r)
|
||||||
|
case MDTNil:
|
||||||
|
return "<<null>>"
|
||||||
|
case MDTEnum:
|
||||||
|
return v.Value.(EnumWrap).String()
|
||||||
|
}
|
||||||
|
return "(err)"
|
||||||
|
}
|
||||||
|
|
||||||
|
// rawValueForJson returns (most of the time) the `Value` field,
// but for some datatypes we do special processing,
// so that we can put the output value into json.Marshal without any surprises
|
||||||
|
func (v MetaValue) rawValueForJson() any {
|
||||||
|
if v.DataType == MDTAny {
|
||||||
|
if v.Value.(AnyWrap).IsNil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if v.Value.(AnyWrap).IsError {
|
||||||
|
return bson.M{"@error": true}
|
||||||
|
}
|
||||||
|
jsonobj := primitive.M{}
|
||||||
|
jsonarr := primitive.A{}
|
||||||
|
if err := json.Unmarshal([]byte(v.Value.(AnyWrap).Json), &jsonobj); err == nil {
|
||||||
|
return jsonobj
|
||||||
|
} else if err := json.Unmarshal([]byte(v.Value.(AnyWrap).Json), &jsonarr); err == nil {
|
||||||
|
return jsonarr
|
||||||
|
} else {
|
||||||
|
return bson.M{"type": v.Value.(AnyWrap).Type, "data": v.Value.(AnyWrap).Json}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if v.DataType == MDTID {
|
||||||
|
if v.Value.(IDWrap).IsNil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return v.Value.(IDWrap).Value
|
||||||
|
}
|
||||||
|
if v.DataType == MDTBytes {
|
||||||
|
return hex.EncodeToString(v.Value.([]byte))
|
||||||
|
}
|
||||||
|
if v.DataType == MDTDuration {
|
||||||
|
return v.Value.(time.Duration).String()
|
||||||
|
}
|
||||||
|
if v.DataType == MDTTime {
|
||||||
|
return v.Value.(time.Time).Format(time.RFC3339Nano)
|
||||||
|
}
|
||||||
|
if v.DataType == MDTObjectID {
|
||||||
|
return v.Value.(primitive.ObjectID).Hex()
|
||||||
|
}
|
||||||
|
if v.DataType == MDTNil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if v.DataType == MDTEnum {
|
||||||
|
if v.Value.(EnumWrap).IsNil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if v.Value.(EnumWrap).ValueRaw != nil {
|
||||||
|
return v.Value.(EnumWrap).ValueRaw
|
||||||
|
}
|
||||||
|
return v.Value.(EnumWrap).ValueString
|
||||||
|
}
|
||||||
|
if v.DataType == MDTFloat32 {
|
||||||
|
if math.IsNaN(float64(v.Value.(float32))) {
|
||||||
|
return "float64::NaN"
|
||||||
|
} else if math.IsInf(float64(v.Value.(float32)), +1) {
|
||||||
|
return "float64::+inf"
|
||||||
|
} else if math.IsInf(float64(v.Value.(float32)), -1) {
|
||||||
|
return "float64::-inf"
|
||||||
|
} else {
|
||||||
|
return v.Value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if v.DataType == MDTFloat64 {
|
||||||
|
if math.IsNaN(v.Value.(float64)) {
|
||||||
|
return "float64::NaN"
|
||||||
|
} else if math.IsInf(v.Value.(float64), +1) {
|
||||||
|
return "float64::+inf"
|
||||||
|
} else if math.IsInf(v.Value.(float64), -1) {
|
||||||
|
return "float64::-inf"
|
||||||
|
} else {
|
||||||
|
return v.Value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return v.Value
|
||||||
|
}
|
||||||
|
|
||||||
|
func (mm MetaMap) FormatOneLine(singleMaxLen int) string {
|
||||||
|
r := ""
|
||||||
|
|
||||||
|
i := 0
|
||||||
|
for key, val := range mm {
|
||||||
|
if i > 0 {
|
||||||
|
r += ", "
|
||||||
|
}
|
||||||
|
|
||||||
|
r += "\"" + key + "\""
|
||||||
|
r += ": "
|
||||||
|
r += "\"" + val.ShortString(singleMaxLen) + "\""
|
||||||
|
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
func (mm MetaMap) FormatMultiLine(indentFront string, indentKeys string, maxLenValue int) string {
|
||||||
|
r := ""
|
||||||
|
|
||||||
|
r += indentFront + "{" + "\n"
|
||||||
|
for key, val := range mm {
|
||||||
|
if key == "gin.body" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
r += indentFront
|
||||||
|
r += indentKeys
|
||||||
|
r += "\"" + key + "\""
|
||||||
|
r += ": "
|
||||||
|
r += "\"" + val.ShortString(maxLenValue) + "\""
|
||||||
|
r += ",\n"
|
||||||
|
}
|
||||||
|
r += indentFront + "}"
|
||||||
|
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
func (mm MetaMap) Any() bool {
|
||||||
|
return len(mm) > 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func (mm MetaMap) Apply(evt *zerolog.Event, limitLen *int) *zerolog.Event {
|
||||||
|
for key, val := range mm {
|
||||||
|
evt = val.Apply(key, evt, limitLen)
|
||||||
|
}
|
||||||
|
return evt
|
||||||
|
}
|
||||||
|
|
||||||
|
func (mm MetaMap) add(key string, mdtype metaDataType, val interface{}) {
|
||||||
|
if _, ok := mm[key]; !ok {
|
||||||
|
mm[key] = MetaValue{DataType: mdtype, Value: val}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
for i := 2; ; i++ {
|
||||||
|
realkey := key + "-" + strconv.Itoa(i)
|
||||||
|
if _, ok := mm[realkey]; !ok {
|
||||||
|
mm[realkey] = MetaValue{DataType: mdtype, Value: val}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
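To make the serialization scheme above concrete, a small round-trip sketch (written as if inside the exerr package, value chosen arbitrarily): MarshalJSON emits the value as "<DataType>:<serialized>", and UnmarshalJSON routes it back through Deserialize so the original Go type is restored.

	mv := MetaValue{DataType: MDTInt, Value: 42}

	raw, _ := json.Marshal(mv) // -> `"Int:42"`

	var back MetaValue
	_ = json.Unmarshal(raw, &back) // back.DataType == MDTInt, back.Value == int(42)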
14 exerr/stacktrace.go Normal file
@@ -0,0 +1,14 @@
package exerr

import (
	"fmt"
	"runtime"
)

func callername(skip int) string {
	pc := make([]uintptr, 15)
	n := runtime.Callers(skip+2, pc)
	frames := runtime.CallersFrames(pc[:n])
	frame, _ := frames.Next()
	return fmt.Sprintf("%s:%d %s", frame.File, frame.Line, frame.Function)
}
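The skip+2 above accounts for the runtime.Callers frame and the callername frame itself, so skip=0 yields the direct caller of callername. A tiny illustrative use (inside the exerr package; whereAmI and the example path are hypothetical):

	func whereAmI() string {
		// with skip=1 this reports the function that called whereAmI,
		// e.g. "/app/main.go:42 main.doWork"
		return callername(1)
	}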
189 exerr/typeWrapper.go Normal file
@@ -0,0 +1,189 @@
|
|||||||
|
package exerr
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"github.com/rs/zerolog/log"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/enums"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
//
|
||||||
|
// These are wrapper objects, because for some metadata types we need to serialize a bit more complex data
// (e.g. the actual type for ID objects, or the JSON representation for `any` types)
|
||||||
|
//
|
||||||
|
|
||||||
|
type IDWrap struct {
|
||||||
|
Type string
|
||||||
|
Value string
|
||||||
|
IsNil bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func newIDWrap(val fmt.Stringer) IDWrap {
|
||||||
|
t := fmt.Sprintf("%T", val)
|
||||||
|
arr := strings.Split(t, ".")
|
||||||
|
if len(arr) > 0 {
|
||||||
|
t = arr[len(arr)-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
if langext.IsNil(val) {
|
||||||
|
return IDWrap{Type: t, Value: "", IsNil: true}
|
||||||
|
}
|
||||||
|
|
||||||
|
v := val.String()
|
||||||
|
return IDWrap{Type: t, Value: v, IsNil: false}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w IDWrap) Serialize() string {
|
||||||
|
if w.IsNil {
|
||||||
|
return "!nil" + ":" + w.Type
|
||||||
|
}
|
||||||
|
return w.Type + ":" + w.Value
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w IDWrap) String() string {
|
||||||
|
if w.IsNil {
|
||||||
|
return w.Type + "<<nil>>"
|
||||||
|
}
|
||||||
|
return w.Type + "(" + w.Value + ")"
|
||||||
|
}
|
||||||
|
|
||||||
|
func deserializeIDWrap(v string) IDWrap {
|
||||||
|
r := strings.SplitN(v, ":", 2)
|
||||||
|
|
||||||
|
if len(r) == 2 && r[0] == "!nil" {
|
||||||
|
return IDWrap{Type: r[1], Value: v, IsNil: true}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(r) == 0 {
|
||||||
|
return IDWrap{}
|
||||||
|
} else if len(r) == 1 {
|
||||||
|
return IDWrap{Type: "", Value: v, IsNil: false}
|
||||||
|
} else {
|
||||||
|
return IDWrap{Type: r[0], Value: r[1], IsNil: false}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type AnyWrap struct {
|
||||||
|
Type string
|
||||||
|
Json string
|
||||||
|
IsError bool
|
||||||
|
IsNil bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func newAnyWrap(val any) (result AnyWrap) {
|
||||||
|
result = AnyWrap{Type: "", Json: "", IsError: true, IsNil: false} // ensure a return in case of recover()
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
if err := recover(); err != nil {
|
||||||
|
// sending an error should never crash our program
|
||||||
|
log.Error().Interface("err", err).Msg("Panic while trying to marshal anywrap ( bmerror.Interface )")
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
t := fmt.Sprintf("%T", val)
|
||||||
|
|
||||||
|
if langext.IsNil(val) {
|
||||||
|
return AnyWrap{Type: t, Json: "", IsError: false, IsNil: true}
|
||||||
|
}
|
||||||
|
|
||||||
|
j, err := json.Marshal(val)
|
||||||
|
if err == nil {
|
||||||
|
return AnyWrap{Type: t, Json: string(j), IsError: false, IsNil: false}
|
||||||
|
} else {
|
||||||
|
return AnyWrap{Type: t, Json: "", IsError: true, IsNil: false}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w AnyWrap) Serialize() string {
|
||||||
|
if w.IsError {
|
||||||
|
return "ERR" + ":" + w.Type + ":" + w.Json
|
||||||
|
} else if w.IsNil {
|
||||||
|
return "NIL" + ":" + w.Type + ":" + w.Json
|
||||||
|
} else {
|
||||||
|
return "OK" + ":" + w.Type + ":" + w.Json
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w AnyWrap) String() string {
|
||||||
|
if w.IsError {
|
||||||
|
return "(error)"
|
||||||
|
} else if w.IsNil {
|
||||||
|
return "(nil)"
|
||||||
|
} else {
|
||||||
|
return w.Json
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func deserializeAnyWrap(v string) AnyWrap {
|
||||||
|
r := strings.SplitN(v, ":", 3)
|
||||||
|
if len(r) != 3 {
|
||||||
|
return AnyWrap{IsError: true, Type: "", Json: "", IsNil: false}
|
||||||
|
} else {
|
||||||
|
if r[0] == "OK" {
|
||||||
|
return AnyWrap{IsError: false, Type: r[1], Json: r[2], IsNil: false}
|
||||||
|
} else if r[0] == "ERR" {
|
||||||
|
return AnyWrap{IsError: true, Type: r[1], Json: r[2], IsNil: false}
|
||||||
|
} else if r[0] == "NIL" {
|
||||||
|
return AnyWrap{IsError: false, Type: r[1], Json: "", IsNil: true}
|
||||||
|
} else {
|
||||||
|
return AnyWrap{IsError: true, Type: "", Json: "", IsNil: false}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type EnumWrap struct {
|
||||||
|
Type string
|
||||||
|
ValueString string
|
||||||
|
ValueRaw enums.Enum // `ValueRaw` is lost during serialization roundtrip
|
||||||
|
IsNil bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func newEnumWrap(val enums.Enum) EnumWrap {
|
||||||
|
t := fmt.Sprintf("%T", val)
|
||||||
|
arr := strings.Split(t, ".")
|
||||||
|
if len(arr) > 0 {
|
||||||
|
t = arr[len(arr)-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
if langext.IsNil(val) {
|
||||||
|
return EnumWrap{Type: t, ValueString: "", ValueRaw: val, IsNil: true}
|
||||||
|
}
|
||||||
|
|
||||||
|
if enumstr, ok := val.(enums.StringEnum); ok {
|
||||||
|
return EnumWrap{Type: t, ValueString: enumstr.String(), ValueRaw: val, IsNil: false}
|
||||||
|
}
|
||||||
|
|
||||||
|
return EnumWrap{Type: t, ValueString: fmt.Sprintf("%v", val), ValueRaw: val, IsNil: false}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w EnumWrap) Serialize() string {
|
||||||
|
if w.IsNil {
|
||||||
|
return "!nil" + ":" + w.Type
|
||||||
|
}
|
||||||
|
return w.Type + ":" + w.ValueString
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w EnumWrap) String() string {
|
||||||
|
if w.IsNil {
|
||||||
|
return w.Type + "<<nil>>"
|
||||||
|
}
|
||||||
|
return "[" + w.Type + "] " + w.ValueString
|
||||||
|
}
|
||||||
|
|
||||||
|
func deserializeEnumWrap(v string) EnumWrap {
|
||||||
|
r := strings.SplitN(v, ":", 2)
|
||||||
|
|
||||||
|
if len(r) == 2 && r[0] == "!nil" {
|
||||||
|
return EnumWrap{Type: r[1], ValueString: v, ValueRaw: nil, IsNil: true}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(r) == 0 {
|
||||||
|
return EnumWrap{}
|
||||||
|
} else if len(r) == 1 {
|
||||||
|
return EnumWrap{Type: "", ValueString: v, ValueRaw: nil, IsNil: false}
|
||||||
|
} else {
|
||||||
|
return EnumWrap{Type: r[0], ValueString: r[1], ValueRaw: nil, IsNil: false}
|
||||||
|
}
|
||||||
|
}
|
||||||
36 fsext/exists.go Normal file
@@ -0,0 +1,36 @@
package fsext

import "os"

func PathExists(fp string) (bool, error) {
	_, err := os.Stat(fp)
	if err == nil {
		return true, nil
	}
	if os.IsNotExist(err) {
		return false, nil
	}
	return false, err
}

func FileExists(fp string) (bool, error) {
	stat, err := os.Stat(fp)
	if err == nil {
		return !stat.IsDir(), nil
	}
	if os.IsNotExist(err) {
		return false, nil
	}
	return false, err
}

func DirectoryExists(fp string) (bool, error) {
	stat, err := os.Stat(fp)
	if err == nil {
		return stat.IsDir(), nil
	}
	if os.IsNotExist(err) {
		return false, nil
	}
	return false, err
}
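A hedged usage sketch for the three helpers above; the /tmp/cache path is arbitrary and the surrounding function returning an error is assumed:

	if ok, err := fsext.DirectoryExists("/tmp/cache"); err != nil {
		return err
	} else if !ok {
		if err := os.MkdirAll("/tmp/cache", 0o755); err != nil {
			return err
		}
	}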
59 ginext/appContext.go Normal file
@@ -0,0 +1,59 @@
package ginext

import (
	"context"
	"github.com/gin-gonic/gin"
	"time"
)

type AppContext struct {
	inner      context.Context
	cancelFunc context.CancelFunc
	cancelled  bool
	GinContext *gin.Context
}

func CreateAppContext(g *gin.Context, innerCtx context.Context, cancelFn context.CancelFunc) *AppContext {
	for key, value := range g.Keys {
		innerCtx = context.WithValue(innerCtx, key, value)
	}
	return &AppContext{
		inner:      innerCtx,
		cancelFunc: cancelFn,
		cancelled:  false,
		GinContext: g,
	}
}

func (ac *AppContext) Deadline() (deadline time.Time, ok bool) {
	return ac.inner.Deadline()
}

func (ac *AppContext) Done() <-chan struct{} {
	return ac.inner.Done()
}

func (ac *AppContext) Err() error {
	return ac.inner.Err()
}

func (ac *AppContext) Value(key any) any {
	return ac.inner.Value(key)
}

func (ac *AppContext) Set(key, value any) {
	ac.inner = context.WithValue(ac.inner, key, value)
}

func (ac *AppContext) Cancel() {
	ac.cancelled = true
	ac.cancelFunc()
}

func (ac *AppContext) RequestURI() string {
	if ac.GinContext != nil && ac.GinContext.Request != nil {
		return ac.GinContext.Request.Method + " :: " + ac.GinContext.Request.RequestURI
	} else {
		return ""
	}
}
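Because AppContext implements Deadline, Done, Err, and Value, it satisfies context.Context and can be handed to any context-aware API; a compile-time assertion (illustrative only, not part of this commit) makes that explicit:

	var _ context.Context = (*ginext.AppContext)(nil)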
23 ginext/commonHandler.go Normal file
@@ -0,0 +1,23 @@
package ginext

import (
	"net/http"
)

func RedirectFound(newuri string) WHandlerFunc {
	return func(pctx PreContext) HTTPResponse {
		return Redirect(http.StatusFound, newuri)
	}
}

func RedirectTemporary(newuri string) WHandlerFunc {
	return func(pctx PreContext) HTTPResponse {
		return Redirect(http.StatusTemporaryRedirect, newuri)
	}
}

func RedirectPermanent(newuri string) WHandlerFunc {
	return func(pctx PreContext) HTTPResponse {
		return Redirect(http.StatusPermanentRedirect, newuri)
	}
}
12 ginext/commonMiddlewares.go Normal file
@@ -0,0 +1,12 @@
package ginext

import (
	"github.com/gin-gonic/gin"
	"gogs.mikescher.com/BlackForestBytes/goext/dataext"
)

func BodyBuffer(g *gin.Context) {
	if g.Request.Body != nil {
		g.Request.Body = dataext.NewBufferedReadCloser(g.Request.Body)
	}
}
21 ginext/cors.go Normal file
@@ -0,0 +1,21 @@
package ginext

import (
	"github.com/gin-gonic/gin"
	"net/http"
)

func CorsMiddleware() gin.HandlerFunc {
	return func(c *gin.Context) {
		c.Writer.Header().Set("Access-Control-Allow-Origin", "*")
		c.Writer.Header().Set("Access-Control-Allow-Credentials", "true")
		c.Writer.Header().Set("Access-Control-Allow-Headers", "Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization, accept, origin, Cache-Control, X-Requested-With")
		c.Writer.Header().Set("Access-Control-Allow-Methods", "OPTIONS, GET, POST, PUT, PATCH, DELETE, COUNT")

		if c.Request.Method == "OPTIONS" {
			c.AbortWithStatus(http.StatusOK)
		} else {
			c.Next()
		}
	}
}
195 ginext/engine.go Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
package ginext
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/rs/zerolog/log"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/mathext"
|
||||||
|
"gogs.mikescher.com/BlackForestBytes/goext/rext"
|
||||||
|
"net"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type GinWrapper struct {
|
||||||
|
engine *gin.Engine
|
||||||
|
suppressGinLogs bool
|
||||||
|
|
||||||
|
allowCors bool
|
||||||
|
ginDebug bool
|
||||||
|
bufferBody bool
|
||||||
|
requestTimeout time.Duration
|
||||||
|
listenerBeforeRequest []func(g *gin.Context)
|
||||||
|
listenerAfterRequest []func(g *gin.Context, resp HTTPResponse)
|
||||||
|
|
||||||
|
routeSpecs []ginRouteSpec
|
||||||
|
}
|
||||||
|
|
||||||
|
type ginRouteSpec struct {
|
||||||
|
Method string
|
||||||
|
URL string
|
||||||
|
Middlewares []string
|
||||||
|
Handler string
|
||||||
|
}
|
||||||
|
|
||||||
|
type Options struct {
|
||||||
|
AllowCors *bool // Add cors handler to allow all CORS requests on the default http methods
|
||||||
|
GinDebug *bool // Set gin.debug to true (adds more logs)
|
||||||
|
BufferBody *bool // Buffers the input body stream, this way the ginext error handler can later include the whole request body
|
||||||
|
Timeout *time.Duration // The default handler timeout
|
||||||
|
ListenerBeforeRequest []func(g *gin.Context) // Registers listeners that are called before the handler method
|
||||||
|
ListenerAfterRequest []func(g *gin.Context, resp HTTPResponse) // Registers listeners that are called after the handler method
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewEngine creates a new (wrapped) ginEngine
|
||||||
|
func NewEngine(opt Options) *GinWrapper {
|
||||||
|
engine := gin.New()
|
||||||
|
|
||||||
|
wrapper := &GinWrapper{
|
||||||
|
engine: engine,
|
||||||
|
suppressGinLogs: false,
|
||||||
|
allowCors: langext.Coalesce(opt.AllowCors, false),
|
||||||
|
ginDebug: langext.Coalesce(opt.GinDebug, true),
|
||||||
|
bufferBody: langext.Coalesce(opt.BufferBody, false),
|
||||||
|
requestTimeout: langext.Coalesce(opt.Timeout, 24*time.Hour),
|
||||||
|
listenerBeforeRequest: opt.ListenerBeforeRequest,
|
||||||
|
listenerAfterRequest: opt.ListenerAfterRequest,
|
||||||
|
}
|
||||||
|
|
||||||
|
engine.RedirectFixedPath = false
|
||||||
|
engine.RedirectTrailingSlash = false
|
||||||
|
|
||||||
|
if wrapper.allowCors {
|
||||||
|
engine.Use(CorsMiddleware())
|
||||||
|
}
|
||||||
|
|
||||||
|
// do not debug-print routes
|
||||||
|
gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {}
|
||||||
|
|
||||||
|
if !wrapper.ginDebug {
|
||||||
|
gin.SetMode(gin.ReleaseMode)
|
||||||
|
|
||||||
|
ginlogger := gin.Logger()
|
||||||
|
engine.Use(func(context *gin.Context) {
|
||||||
|
if !wrapper.suppressGinLogs {
|
||||||
|
ginlogger(context)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
gin.SetMode(gin.DebugMode)
|
||||||
|
}
|
||||||
|
|
||||||
|
return wrapper
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *GinWrapper) ListenAndServeHTTP(addr string, postInit func(port string)) (chan error, *http.Server) {
|
||||||
|
|
||||||
|
w.DebugPrintRoutes()
|
||||||
|
|
||||||
|
httpserver := &http.Server{
|
||||||
|
Addr: addr,
|
||||||
|
Handler: w.engine,
|
||||||
|
}
|
||||||
|
|
||||||
|
errChan := make(chan error)
|
||||||
|
|
||||||
|
go func() {
|
||||||
|
|
||||||
|
ln, err := net.Listen("tcp", httpserver.Addr)
|
||||||
|
if err != nil {
|
||||||
|
errChan <- err
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
_, port, err := net.SplitHostPort(ln.Addr().String())
|
||||||
|
if err != nil {
|
||||||
|
errChan <- err
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Info().Str("address", httpserver.Addr).Msg("HTTP-Server started on http://localhost:" + port)
|
||||||
|
|
||||||
|
if postInit != nil {
|
||||||
|
postInit(port) // the net.Listener a few lines above is at this point actually already buffering requests
|
||||||
|
}
|
||||||
|
|
||||||
|
errChan <- httpserver.Serve(ln)
|
||||||
|
}()
|
||||||
|
|
||||||
|
return errChan, httpserver
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *GinWrapper) DebugPrintRoutes() {
|
||||||
|
if !w.ginDebug {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
lines := make([][4]string, 0)
|
||||||
|
|
||||||
|
pad := [4]int{0, 0, 0, 0}
|
||||||
|
|
||||||
|
for _, spec := range w.routeSpecs {
|
||||||
|
|
||||||
|
line := [4]string{
|
||||||
|
spec.Method,
|
||||||
|
spec.URL,
|
||||||
|
strings.Join(langext.ArrMap(spec.Middlewares, w.cleanMiddlewareName), " -> "),
|
||||||
|
w.cleanMiddlewareName(spec.Handler),
|
||||||
|
}
|
||||||
|
|
||||||
|
lines = append(lines, line)
|
||||||
|
|
||||||
|
pad[0] = mathext.Max(pad[0], len(line[0]))
|
||||||
|
pad[1] = mathext.Max(pad[1], len(line[1]))
|
||||||
|
pad[2] = mathext.Max(pad[2], len(line[2]))
|
||||||
|
pad[3] = mathext.Max(pad[3], len(line[3]))
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Gin-Routes:\n")
|
||||||
|
fmt.Printf("{\n")
|
||||||
|
for _, line := range lines {
|
||||||
|
|
||||||
|
fmt.Printf(" %s %s --> %s --> %s\n",
|
||||||
|
langext.StrPadRight("["+line[0]+"]", " ", pad[0]+2),
|
||||||
|
langext.StrPadRight(line[1], " ", pad[1]),
|
||||||
|
langext.StrPadRight(line[2], " ", pad[2]),
|
||||||
|
langext.StrPadRight(line[3], " ", pad[3]))
|
||||||
|
}
|
||||||
|
fmt.Printf("}\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *GinWrapper) cleanMiddlewareName(fname string) string {
|
||||||
|
|
||||||
|
funcSuffix := rext.W(regexp.MustCompile(`\.func[0-9]+(?:\.[0-9]+)*$`))
|
||||||
|
if match, ok := funcSuffix.MatchFirst(fname); ok {
|
||||||
|
fname = fname[:len(fname)-match.FullMatch().Length()]
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.HasSuffix(fname, ".(*GinRoutesWrapper).WithJSONFilter") {
|
||||||
|
fname = "[JSONFilter]"
|
||||||
|
}
|
||||||
|
|
||||||
|
if fname == "ginext.BodyBuffer" {
|
||||||
|
fname = "[BodyBuffer]"
|
||||||
|
}
|
||||||
|
|
||||||
|
skipPrefixes := []string{"api.(*Handler).", "api.", "ginext.", "handler.", "admin-app.", "employee-app.", "employer-app."}
|
||||||
|
for _, pfx := range skipPrefixes {
|
||||||
|
if strings.HasPrefix(fname, pfx) {
|
||||||
|
fname = fname[len(pfx):]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return fname
|
||||||
|
}
|
||||||
|
|
||||||
|
// ServeHTTP only used for unit tests
|
||||||
|
func (w *GinWrapper) ServeHTTP(req *http.Request) *httptest.ResponseRecorder {
|
||||||
|
respRec := httptest.NewRecorder()
|
||||||
|
w.engine.ServeHTTP(respRec, req)
|
||||||
|
return respRec
|
||||||
|
}
|
||||||
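A hedged sketch of wiring the wrapper up at startup, using only the options and methods shown above; the address and timeout are placeholders, and the logging calls assume the zerolog global logger plus the langext and time imports:

	ginWrapper := ginext.NewEngine(ginext.Options{
		AllowCors:  langext.Ptr(true),
		GinDebug:   langext.Ptr(false),
		BufferBody: langext.Ptr(true),
		Timeout:    langext.Ptr(30 * time.Second),
	})

	errChan, _ := ginWrapper.ListenAndServeHTTP("0.0.0.0:8080", func(port string) {
		log.Info().Msg("listening on port " + port)
	})

	if err := <-errChan; err != nil {
		log.Fatal().Err(err).Msg("http server stopped")
	}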
60 ginext/funcWrapper.go Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
package ginext

import (
	"fmt"
	"github.com/gin-gonic/gin"
	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

type WHandlerFunc func(PreContext) HTTPResponse

func Wrap(w *GinWrapper, fn WHandlerFunc) gin.HandlerFunc {

	return func(g *gin.Context) {

		reqctx := g.Request.Context()

		pctx := PreContext{
			wrapper:        w,
			ginCtx:         g,
			persistantData: &preContextData{},
		}

		for _, lstr := range w.listenerBeforeRequest {
			lstr(g)
		}

		wrap, stackTrace, panicObj := callPanicSafe(fn, pctx)
		if panicObj != nil {

			fmt.Printf("\n======== ======== STACKTRACE ======== ========\n%s\n======== ======== ======== ========\n\n", stackTrace)

			err := exerr.
				New(exerr.TypePanic, "Panic occured (in gin handler)").
				Any("panicObj", panicObj).
				Str("trace", stackTrace).
				Build()

			wrap = Error(err)
		}

		if g.Writer.Written() {
			panic("Writing in WrapperFunc is not supported")
		}

		if pctx.persistantData.sessionObj != nil {
			err := pctx.persistantData.sessionObj.Finish(reqctx, wrap)
			if err != nil {
				wrap = Error(exerr.Wrap(err, "Failed to finish session").Any("originalResponse", wrap).Build())
			}
		}

		for _, lstr := range w.listenerAfterRequest {
			lstr(g, wrap)
		}

		if reqctx.Err() == nil {
			wrap.Write(g)
		}
	}
}
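Wrap is the bridge between the WHandlerFunc shape and a plain gin.HandlerFunc; GinRouteBuilder.Handle (in ginext/routes.go further down) calls it for you. A sketch of a conforming handler and the manual registration form; the handler name, route path and the already-constructed wrapper/engine are assumptions for illustration only:

package api

import (
	"net/http"

	"github.com/gin-gonic/gin"
	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

// ping is a WHandlerFunc: it receives a PreContext and returns an HTTPResponse.
func ping(pctx ginext.PreContext) ginext.HTTPResponse {
	return ginext.Text(http.StatusOK, "pong")
}

// registerPing shows the manual form; normally Routes().GET("/ping").Handle(ping)
// does the wrapping internally. wrapper and engine are assumed to exist elsewhere.
func registerPing(wrapper *ginext.GinWrapper, engine *gin.Engine) {
	engine.GET("/ping", ginext.Wrap(wrapper, ping))
}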
190  ginext/preContext.go  Normal file
@@ -0,0 +1,190 @@
package ginext

import (
	"bytes"
	"context"
	"fmt"
	"github.com/gin-gonic/gin"
	"github.com/gin-gonic/gin/binding"
	"gogs.mikescher.com/BlackForestBytes/goext/dataext"
	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"io"
	"runtime/debug"
	"time"
)

type PreContext struct {
	ginCtx         *gin.Context
	wrapper        *GinWrapper
	uri            any
	query          any
	body           any
	rawbody        *[]byte
	form           any
	header         any
	timeout        *time.Duration
	persistantData *preContextData // must be a ptr, so that we can get the values back in out Wrap func
}

type preContextData struct {
	sessionObj SessionObject
}

func (pctx *PreContext) URI(uri any) *PreContext {
	pctx.uri = uri
	return pctx
}

func (pctx *PreContext) Query(query any) *PreContext {
	pctx.query = query
	return pctx
}

func (pctx *PreContext) Body(body any) *PreContext {
	pctx.body = body
	return pctx
}

func (pctx *PreContext) RawBody(rawbody *[]byte) *PreContext {
	pctx.rawbody = rawbody
	return pctx
}

func (pctx *PreContext) Form(form any) *PreContext {
	pctx.form = form
	return pctx
}

func (pctx *PreContext) Header(header any) *PreContext {
	pctx.header = header
	return pctx
}

func (pctx *PreContext) WithTimeout(to time.Duration) *PreContext {
	pctx.timeout = &to
	return pctx
}

func (pctx *PreContext) WithSession(sessionObj SessionObject) *PreContext {
	pctx.persistantData.sessionObj = sessionObj
	return pctx
}

func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
	if pctx.uri != nil {
		if err := pctx.ginCtx.ShouldBindUri(pctx.uri); err != nil {
			err = exerr.Wrap(err, "Failed to read uri").
				WithType(exerr.TypeBindFailURI).
				Str("struct_type", fmt.Sprintf("%T", pctx.uri)).
				Build()
			return nil, nil, langext.Ptr(Error(err))
		}
	}

	if pctx.query != nil {
		if err := pctx.ginCtx.ShouldBindQuery(pctx.query); err != nil {
			err = exerr.Wrap(err, "Failed to read query").
				WithType(exerr.TypeBindFailQuery).
				Str("struct_type", fmt.Sprintf("%T", pctx.query)).
				Build()
			return nil, nil, langext.Ptr(Error(err))
		}
	}

	if pctx.body != nil {
		if pctx.ginCtx.ContentType() == "application/json" {
			if err := pctx.ginCtx.ShouldBindJSON(pctx.body); err != nil {
				err = exerr.Wrap(err, "Failed to read json-body").
					WithType(exerr.TypeBindFailJSON).
					Str("struct_type", fmt.Sprintf("%T", pctx.body)).
					Build()
				return nil, nil, langext.Ptr(Error(err))
			}
		} else {
			err := exerr.New(exerr.TypeBindFailJSON, "missing JSON body").
				Str("struct_type", fmt.Sprintf("%T", pctx.body)).
				Build()
			return nil, nil, langext.Ptr(Error(err))
		}
	}

	if pctx.rawbody != nil {
		if brc, ok := pctx.ginCtx.Request.Body.(dataext.BufferedReadCloser); ok {
			v, err := brc.BufferedAll()
			if err != nil {
				return nil, nil, langext.Ptr(Error(err))
			}
			*pctx.rawbody = v
		} else {
			buf := &bytes.Buffer{}
			_, err := io.Copy(buf, pctx.ginCtx.Request.Body)
			if err != nil {
				return nil, nil, langext.Ptr(Error(err))
			}
			*pctx.rawbody = buf.Bytes()
		}
	}

	if pctx.form != nil {
		if pctx.ginCtx.ContentType() == "multipart/form-data" {
			if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil {
				err = exerr.Wrap(err, "Failed to read multipart-form").
					WithType(exerr.TypeBindFailFormData).
					Str("struct_type", fmt.Sprintf("%T", pctx.form)).
					Build()
				return nil, nil, langext.Ptr(Error(err))
			}
		} else if pctx.ginCtx.ContentType() == "application/x-www-form-urlencoded" {
			if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil {
				err = exerr.Wrap(err, "Failed to read urlencoded-form").
					WithType(exerr.TypeBindFailFormData).
					Str("struct_type", fmt.Sprintf("%T", pctx.form)).
					Build()
				return nil, nil, langext.Ptr(Error(err))
			}
		} else {
			err := exerr.New(exerr.TypeBindFailFormData, "missing form body").
				Str("struct_type", fmt.Sprintf("%T", pctx.form)).
				Build()
			return nil, nil, langext.Ptr(Error(err))
		}
	}

	if pctx.header != nil {
		if err := pctx.ginCtx.ShouldBindHeader(pctx.header); err != nil {
			err = exerr.Wrap(err, "Failed to read header").
				WithType(exerr.TypeBindFailHeader).
				Str("struct_type", fmt.Sprintf("%T", pctx.query)).
				Build()
			return nil, nil, langext.Ptr(Error(err))
		}
	}

	ictx, cancel := context.WithTimeout(context.Background(), langext.Coalesce(pctx.timeout, pctx.wrapper.requestTimeout))

	actx := CreateAppContext(pctx.ginCtx, ictx, cancel)

	if pctx.persistantData.sessionObj != nil {
		err := pctx.persistantData.sessionObj.Init(pctx.ginCtx, actx)
		if err != nil {
			actx.Cancel()
			return nil, nil, langext.Ptr(Error(exerr.Wrap(err, "Failed to init session").Build()))
		}
	}

	return actx, pctx.ginCtx, nil
}

func callPanicSafe(fn WHandlerFunc, pctx PreContext) (res HTTPResponse, stackTrace string, panicObj any) {
	defer func() {
		if rec := recover(); rec != nil {
			res = nil
			stackTrace = string(debug.Stack())
			panicObj = rec
		}
	}()

	res = fn(pctx)
	return res, "", nil
}
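A sketch of how a handler typically drives PreContext: declare the binding targets, chain the builder methods, and call Start. The struct names and the route are illustrative only, and the deferred ctx.Cancel() is an assumption about how the AppContext returned by Start is meant to be released (its construction is not part of this diff):

package api

import (
	"net/http"

	"github.com/gin-gonic/gin"
	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

type getUserURI struct {
	UserID string `uri:"uid"`
}

type getUserQuery struct {
	Expand bool `form:"expand"`
}

func getUser(pctx ginext.PreContext) ginext.HTTPResponse {
	var u getUserURI
	var q getUserQuery

	ctx, g, errResp := pctx.URI(&u).Query(&q).Start()
	if errResp != nil {
		return *errResp // binding failed, Start already built the Error response
	}
	defer ctx.Cancel() // assumption: the handler releases the timeout context it received

	_ = g // the raw *gin.Context, available when needed

	return ginext.JSON(http.StatusOK, gin.H{"userID": u.UserID, "expand": q.Expand})
}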
502  ginext/response.go  Normal file
@@ -0,0 +1,502 @@
package ginext

import (
	"fmt"
	"github.com/gin-gonic/gin"
	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"os"
)

type cookieval struct {
	name     string
	value    string
	maxAge   int
	path     string
	domain   string
	secure   bool
	httpOnly bool
}

type headerval struct {
	Key string
	Val string
}

type HTTPResponse interface {
	Write(g *gin.Context)
	WithHeader(k string, v string) HTTPResponse
	WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse
	IsSuccess() bool
}

type InspectableHTTPResponse interface {
	HTTPResponse

	Statuscode() int
	BodyString(g *gin.Context) *string
	ContentType() string
	Headers() []string
}

type jsonHTTPResponse struct {
|
||||||
|
statusCode int
|
||||||
|
data any
|
||||||
|
headers []headerval
|
||||||
|
cookies []cookieval
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonHTTPResponse) jsonRenderer(g *gin.Context) json.GoJsonRender {
|
||||||
|
var f *string
|
||||||
|
if jsonfilter := g.GetString("goext.jsonfilter"); jsonfilter != "" {
|
||||||
|
f = &jsonfilter
|
||||||
|
}
|
||||||
|
return json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true, Filter: f}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonHTTPResponse) Write(g *gin.Context) {
|
||||||
|
for _, v := range j.headers {
|
||||||
|
g.Header(v.Key, v.Val)
|
||||||
|
}
|
||||||
|
for _, v := range j.cookies {
|
||||||
|
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
|
||||||
|
}
|
||||||
|
g.Render(j.statusCode, j.jsonRenderer(g))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse {
|
||||||
|
j.headers = append(j.headers, headerval{k, v})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
|
||||||
|
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonHTTPResponse) IsSuccess() bool {
|
||||||
|
return j.statusCode >= 200 && j.statusCode <= 399
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonHTTPResponse) Statuscode() int {
|
||||||
|
return j.statusCode
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonHTTPResponse) BodyString(g *gin.Context) *string {
|
||||||
|
if str, err := j.jsonRenderer(g).RenderString(); err == nil {
|
||||||
|
return &str
|
||||||
|
} else {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonHTTPResponse) ContentType() string {
|
||||||
|
return "application/json"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonHTTPResponse) Headers() []string {
|
||||||
|
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
|
||||||
|
}
|
||||||
|
|
||||||
|
type emptyHTTPResponse struct {
|
||||||
|
statusCode int
|
||||||
|
headers []headerval
|
||||||
|
cookies []cookieval
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j emptyHTTPResponse) Write(g *gin.Context) {
|
||||||
|
for _, v := range j.headers {
|
||||||
|
g.Header(v.Key, v.Val)
|
||||||
|
}
|
||||||
|
for _, v := range j.cookies {
|
||||||
|
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
|
||||||
|
}
|
||||||
|
g.Status(j.statusCode)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse {
|
||||||
|
j.headers = append(j.headers, headerval{k, v})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j emptyHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
|
||||||
|
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j emptyHTTPResponse) IsSuccess() bool {
|
||||||
|
return j.statusCode >= 200 && j.statusCode <= 399
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j emptyHTTPResponse) Statuscode() int {
|
||||||
|
return j.statusCode
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j emptyHTTPResponse) BodyString(*gin.Context) *string {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j emptyHTTPResponse) ContentType() string {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j emptyHTTPResponse) Headers() []string {
|
||||||
|
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
|
||||||
|
}
|
||||||
|
|
||||||
|
type textHTTPResponse struct {
|
||||||
|
statusCode int
|
||||||
|
data string
|
||||||
|
headers []headerval
|
||||||
|
cookies []cookieval
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j textHTTPResponse) Write(g *gin.Context) {
|
||||||
|
for _, v := range j.headers {
|
||||||
|
g.Header(v.Key, v.Val)
|
||||||
|
}
|
||||||
|
for _, v := range j.cookies {
|
||||||
|
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
|
||||||
|
}
|
||||||
|
g.String(j.statusCode, "%s", j.data)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse {
|
||||||
|
j.headers = append(j.headers, headerval{k, v})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j textHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
|
||||||
|
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j textHTTPResponse) IsSuccess() bool {
|
||||||
|
return j.statusCode >= 200 && j.statusCode <= 399
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j textHTTPResponse) Statuscode() int {
|
||||||
|
return j.statusCode
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j textHTTPResponse) BodyString(*gin.Context) *string {
|
||||||
|
return langext.Ptr(j.data)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j textHTTPResponse) ContentType() string {
|
||||||
|
return "text/plain"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j textHTTPResponse) Headers() []string {
|
||||||
|
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
|
||||||
|
}
|
||||||
|
|
||||||
|
type dataHTTPResponse struct {
|
||||||
|
statusCode int
|
||||||
|
data []byte
|
||||||
|
contentType string
|
||||||
|
headers []headerval
|
||||||
|
cookies []cookieval
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j dataHTTPResponse) Write(g *gin.Context) {
|
||||||
|
for _, v := range j.headers {
|
||||||
|
g.Header(v.Key, v.Val)
|
||||||
|
}
|
||||||
|
for _, v := range j.cookies {
|
||||||
|
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
|
||||||
|
}
|
||||||
|
g.Data(j.statusCode, j.contentType, j.data)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
|
||||||
|
j.headers = append(j.headers, headerval{k, v})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j dataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
|
||||||
|
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j dataHTTPResponse) IsSuccess() bool {
|
||||||
|
return j.statusCode >= 200 && j.statusCode <= 399
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j dataHTTPResponse) Statuscode() int {
|
||||||
|
return j.statusCode
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j dataHTTPResponse) BodyString(*gin.Context) *string {
|
||||||
|
return langext.Ptr(string(j.data))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j dataHTTPResponse) ContentType() string {
|
||||||
|
return j.contentType
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j dataHTTPResponse) Headers() []string {
|
||||||
|
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
|
||||||
|
}
|
||||||
|
|
||||||
|
type fileHTTPResponse struct {
|
||||||
|
mimetype string
|
||||||
|
filepath string
|
||||||
|
filename *string
|
||||||
|
headers []headerval
|
||||||
|
cookies []cookieval
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j fileHTTPResponse) Write(g *gin.Context) {
|
||||||
|
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
|
||||||
|
if j.filename != nil {
|
||||||
|
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
|
||||||
|
|
||||||
|
}
|
||||||
|
for _, v := range j.headers {
|
||||||
|
g.Header(v.Key, v.Val)
|
||||||
|
}
|
||||||
|
for _, v := range j.cookies {
|
||||||
|
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
|
||||||
|
}
|
||||||
|
g.File(j.filepath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse {
|
||||||
|
j.headers = append(j.headers, headerval{k, v})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j fileHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
|
||||||
|
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j fileHTTPResponse) IsSuccess() bool {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j fileHTTPResponse) Statuscode() int {
|
||||||
|
return 200
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j fileHTTPResponse) BodyString(*gin.Context) *string {
|
||||||
|
data, err := os.ReadFile(j.filepath)
|
||||||
|
if err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return langext.Ptr(string(data))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j fileHTTPResponse) ContentType() string {
|
||||||
|
return j.mimetype
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j fileHTTPResponse) Headers() []string {
|
||||||
|
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
|
||||||
|
}
|
||||||
|
|
||||||
|
type downloadDataHTTPResponse struct {
|
||||||
|
statusCode int
|
||||||
|
mimetype string
|
||||||
|
data []byte
|
||||||
|
filename *string
|
||||||
|
headers []headerval
|
||||||
|
cookies []cookieval
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j downloadDataHTTPResponse) Write(g *gin.Context) {
|
||||||
|
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
|
||||||
|
if j.filename != nil {
|
||||||
|
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
|
||||||
|
}
|
||||||
|
for _, v := range j.headers {
|
||||||
|
g.Header(v.Key, v.Val)
|
||||||
|
}
|
||||||
|
for _, v := range j.cookies {
|
||||||
|
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
|
||||||
|
}
|
||||||
|
g.Data(j.statusCode, j.mimetype, j.data)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
|
||||||
|
j.headers = append(j.headers, headerval{k, v})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j downloadDataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
|
||||||
|
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j downloadDataHTTPResponse) IsSuccess() bool {
|
||||||
|
return j.statusCode >= 200 && j.statusCode <= 399
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j downloadDataHTTPResponse) Statuscode() int {
|
||||||
|
return j.statusCode
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j downloadDataHTTPResponse) BodyString(*gin.Context) *string {
|
||||||
|
return langext.Ptr(string(j.data))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j downloadDataHTTPResponse) ContentType() string {
|
||||||
|
return j.mimetype
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j downloadDataHTTPResponse) Headers() []string {
|
||||||
|
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
|
||||||
|
}
|
||||||
|
|
||||||
|
type redirectHTTPResponse struct {
|
||||||
|
statusCode int
|
||||||
|
url string
|
||||||
|
headers []headerval
|
||||||
|
cookies []cookieval
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j redirectHTTPResponse) Write(g *gin.Context) {
|
||||||
|
for _, v := range j.headers {
|
||||||
|
g.Header(v.Key, v.Val)
|
||||||
|
}
|
||||||
|
for _, v := range j.cookies {
|
||||||
|
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
|
||||||
|
}
|
||||||
|
g.Redirect(j.statusCode, j.url)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse {
|
||||||
|
j.headers = append(j.headers, headerval{k, v})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j redirectHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
|
||||||
|
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j redirectHTTPResponse) IsSuccess() bool {
|
||||||
|
return j.statusCode >= 200 && j.statusCode <= 399
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j redirectHTTPResponse) Statuscode() int {
|
||||||
|
return j.statusCode
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j redirectHTTPResponse) BodyString(*gin.Context) *string {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j redirectHTTPResponse) ContentType() string {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j redirectHTTPResponse) Headers() []string {
|
||||||
|
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
|
||||||
|
}
|
||||||
|
|
||||||
|
type jsonAPIErrResponse struct {
|
||||||
|
err *exerr.ExErr
|
||||||
|
headers []headerval
|
||||||
|
cookies []cookieval
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonAPIErrResponse) Write(g *gin.Context) {
|
||||||
|
for _, v := range j.headers {
|
||||||
|
g.Header(v.Key, v.Val)
|
||||||
|
}
|
||||||
|
for _, v := range j.cookies {
|
||||||
|
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
|
||||||
|
}
|
||||||
|
j.err.Output(g)
|
||||||
|
|
||||||
|
j.err.CallListener(exerr.MethodOutput)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse {
|
||||||
|
j.headers = append(j.headers, headerval{k, v})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonAPIErrResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
|
||||||
|
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
|
||||||
|
return j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonAPIErrResponse) IsSuccess() bool {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonAPIErrResponse) Statuscode() int {
|
||||||
|
return langext.Coalesce(j.err.RecursiveStatuscode(), 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonAPIErrResponse) BodyString(*gin.Context) *string {
|
||||||
|
if str, err := j.err.ToDefaultAPIJson(); err == nil {
|
||||||
|
return &str
|
||||||
|
} else {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonAPIErrResponse) ContentType() string {
|
||||||
|
return "application/json"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonAPIErrResponse) Headers() []string {
|
||||||
|
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
|
||||||
|
}
|
||||||
|
|
||||||
|
func (j jsonAPIErrResponse) Unwrap() error {
|
||||||
|
return j.err
|
||||||
|
}

func Status(sc int) HTTPResponse {
	return &emptyHTTPResponse{statusCode: sc}
}

func JSON(sc int, data any) HTTPResponse {
	return &jsonHTTPResponse{statusCode: sc, data: data}
}

func Data(sc int, contentType string, data []byte) HTTPResponse {
	return &dataHTTPResponse{statusCode: sc, contentType: contentType, data: data}
}

func Text(sc int, data string) HTTPResponse {
	return &textHTTPResponse{statusCode: sc, data: data}
}

func File(mimetype string, filepath string) HTTPResponse {
	return &fileHTTPResponse{mimetype: mimetype, filepath: filepath}
}

func Download(mimetype string, filepath string, filename string) HTTPResponse {
	return &fileHTTPResponse{mimetype: mimetype, filepath: filepath, filename: &filename}
}

func DownloadData(status int, mimetype string, filename string, data []byte) HTTPResponse {
	return &downloadDataHTTPResponse{statusCode: status, mimetype: mimetype, data: data, filename: &filename}
}

func Redirect(sc int, newURL string) HTTPResponse {
	return &redirectHTTPResponse{statusCode: sc, url: newURL}
}

func Error(e error) HTTPResponse {
	return &jsonAPIErrResponse{
		err: exerr.FromError(e),
	}
}

func ErrWrap(e error, errorType exerr.ErrorType, msg string) HTTPResponse {
	return &jsonAPIErrResponse{
		err: exerr.FromError(exerr.Wrap(e, msg).WithType(errorType).Build()),
	}
}

func NotImplemented() HTTPResponse {
	return Error(exerr.New(exerr.TypeNotImplemented, "").Build())
}
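The package-level constructors above are the intended way to build responses inside handlers. A few illustrative calls; the status codes, payloads and file names are made up, and err/pdf are assumed inputs:

package api

import (
	"net/http"

	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

// buildResponses exercises several constructors from ginext/response.go.
func buildResponses(err error, pdf []byte) []ginext.HTTPResponse {
	return []ginext.HTTPResponse{
		ginext.Status(http.StatusNoContent),                                  // empty body, status only
		ginext.JSON(http.StatusOK, map[string]any{"ok": true}),               // JSON body via the gojson renderer
		ginext.Text(http.StatusOK, "pong"),                                   // text/plain body
		ginext.DownloadData(http.StatusOK, "application/pdf", "report.pdf", pdf), // attachment with Content-Disposition
		ginext.Redirect(http.StatusFound, "https://example.invalid/login"),   // redirect response
		ginext.Error(err),                                                    // exerr-based JSON error response
		ginext.JSON(http.StatusOK, map[string]any{"ok": true}).
			WithHeader("Cache-Control", "no-store"), // responses can be decorated with headers/cookies
	}
}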
211  ginext/routes.go  Normal file
@@ -0,0 +1,211 @@
package ginext

import (
	"github.com/gin-gonic/gin"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"net/http"
	"path"
	"reflect"
	"runtime"
	"strings"
)

var anyMethods = []string{
	http.MethodGet, http.MethodPost, http.MethodPut, http.MethodPatch,
	http.MethodHead, http.MethodOptions, http.MethodDelete, http.MethodConnect,
	http.MethodTrace,
}

type GinRoutesWrapper struct {
	wrapper        *GinWrapper
	routes         gin.IRouter
	absPath        string
	defaultHandler []gin.HandlerFunc
}

type GinRouteBuilder struct {
	routes *GinRoutesWrapper

	method   string
	relPath  string
	absPath  string
	handlers []gin.HandlerFunc
}

func (w *GinWrapper) Routes() *GinRoutesWrapper {
	return &GinRoutesWrapper{
		wrapper:        w,
		routes:         w.engine,
		absPath:        "",
		defaultHandler: make([]gin.HandlerFunc, 0),
	}
}

func (w *GinRoutesWrapper) Group(relativePath string) *GinRoutesWrapper {
	return &GinRoutesWrapper{
		wrapper:        w.wrapper,
		routes:         w.routes.Group(relativePath),
		defaultHandler: langext.ArrCopy(w.defaultHandler),
		absPath:        joinPaths(w.absPath, relativePath),
	}
}

func (w *GinRoutesWrapper) Use(middleware ...gin.HandlerFunc) *GinRoutesWrapper {
	defHandler := langext.ArrCopy(w.defaultHandler)
	defHandler = append(defHandler, middleware...)
	return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler, absPath: w.absPath}
}

func (w *GinRoutesWrapper) WithJSONFilter(filter string) *GinRoutesWrapper {
	return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) })
}

func (w *GinRoutesWrapper) GET(relativePath string) *GinRouteBuilder {
	return w._route(http.MethodGet, relativePath)
}

func (w *GinRoutesWrapper) POST(relativePath string) *GinRouteBuilder {
	return w._route(http.MethodPost, relativePath)
}

func (w *GinRoutesWrapper) DELETE(relativePath string) *GinRouteBuilder {
	return w._route(http.MethodDelete, relativePath)
}

func (w *GinRoutesWrapper) PATCH(relativePath string) *GinRouteBuilder {
	return w._route(http.MethodPatch, relativePath)
}

func (w *GinRoutesWrapper) PUT(relativePath string) *GinRouteBuilder {
	return w._route(http.MethodPut, relativePath)
}

func (w *GinRoutesWrapper) OPTIONS(relativePath string) *GinRouteBuilder {
	return w._route(http.MethodOptions, relativePath)
}

func (w *GinRoutesWrapper) HEAD(relativePath string) *GinRouteBuilder {
	return w._route(http.MethodHead, relativePath)
}

func (w *GinRoutesWrapper) COUNT(relativePath string) *GinRouteBuilder {
	return w._route("COUNT", relativePath)
}

func (w *GinRoutesWrapper) Any(relativePath string) *GinRouteBuilder {
	return w._route("*", relativePath)
}

func (w *GinRoutesWrapper) _route(method string, relativePath string) *GinRouteBuilder {
	return &GinRouteBuilder{
		routes:   w,
		method:   method,
		relPath:  relativePath,
		absPath:  joinPaths(w.absPath, relativePath),
		handlers: langext.ArrCopy(w.defaultHandler),
	}
}

func (w *GinRouteBuilder) Use(middleware ...gin.HandlerFunc) *GinRouteBuilder {
	w.handlers = append(w.handlers, middleware...)
	return w
}

func (w *GinRouteBuilder) WithJSONFilter(filter string) *GinRouteBuilder {
	return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) })
}

func (w *GinRouteBuilder) Handle(handler WHandlerFunc) {

	if w.routes.wrapper.bufferBody {
		arr := make([]gin.HandlerFunc, 0, len(w.handlers)+1)
		arr = append(arr, BodyBuffer)
		arr = append(arr, w.handlers...)
		w.handlers = arr
	}

	middlewareNames := langext.ArrMap(w.handlers, func(v gin.HandlerFunc) string { return nameOfFunction(v) })
	handlerName := nameOfFunction(handler)

	w.handlers = append(w.handlers, Wrap(w.routes.wrapper, handler))

	methodName := w.method

	if w.method == "*" {
		methodName = "ANY"
		for _, method := range anyMethods {
			w.routes.routes.Handle(method, w.relPath, w.handlers...)
		}
	} else {
		w.routes.routes.Handle(w.method, w.relPath, w.handlers...)
	}

	w.routes.wrapper.routeSpecs = append(w.routes.wrapper.routeSpecs, ginRouteSpec{
		Method:      methodName,
		URL:         w.absPath,
		Middlewares: middlewareNames,
		Handler:     handlerName,
	})
}

func (w *GinWrapper) NoRoute(handler WHandlerFunc) {

	handlers := make([]gin.HandlerFunc, 0)

	if w.bufferBody {
		handlers = append(handlers, BodyBuffer)
	}

	middlewareNames := langext.ArrMap(handlers, func(v gin.HandlerFunc) string { return nameOfFunction(v) })
	handlerName := nameOfFunction(handler)

	handlers = append(handlers, Wrap(w, handler))

	w.engine.NoRoute(handlers...)

	w.routeSpecs = append(w.routeSpecs, ginRouteSpec{
		Method:      "ANY",
		URL:         "[NO_ROUTE]",
		Middlewares: middlewareNames,
		Handler:     handlerName,
	})
}

func nameOfFunction(f any) string {

	fname := runtime.FuncForPC(reflect.ValueOf(f).Pointer()).Name()

	split := strings.Split(fname, "/")
	if len(split) == 0 {
		return ""
	}

	fname = split[len(split)-1]

	// https://stackoverflow.com/a/32925345/1761622
	if strings.HasSuffix(fname, "-fm") {
		fname = fname[:len(fname)-len("-fm")]
	}

	return fname
}

// joinPaths is copied verbatim from gin@v1.9.1/gin.go
func joinPaths(absolutePath, relativePath string) string {
	if relativePath == "" {
		return absolutePath
	}

	finalPath := path.Join(absolutePath, relativePath)
	if lastChar(relativePath) == '/' && lastChar(finalPath) != '/' {
		return finalPath + "/"
	}
	return finalPath
}

func lastChar(str string) uint8 {
	if str == "" {
		panic("The length of the string can't be 0")
	}
	return str[len(str)-1]
}
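How the route wrapper is meant to be combined with the handler wrapper, as a sketch; the wrapper instance, the middleware and the handler functions (ping, getUser, createUser, notFoundHandler) are assumed to exist elsewhere and are not part of this diff:

package api

import (
	"github.com/gin-gonic/gin"
	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

func registerRoutes(w *ginext.GinWrapper, requireAuth gin.HandlerFunc) {
	api := w.Routes().Group("/api").Group("/v1")

	// public route
	api.GET("/ping").Handle(ping)

	// Use() returns a copy, so requireAuth only applies to routes registered below
	authed := api.Use(requireAuth)
	authed.GET("/users/:uid").Handle(getUser)
	authed.POST("/users").WithJSONFilter("admin").Handle(createUser)

	// fallback for unmatched paths, recorded as [NO_ROUTE] in the route list
	w.NoRoute(notFoundHandler)
}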
11  ginext/session.go  Normal file
@@ -0,0 +1,11 @@
package ginext

import (
	"context"
	"github.com/gin-gonic/gin"
)

type SessionObject interface {
	Init(g *gin.Context, ctx *AppContext) error
	Finish(ctx context.Context, resp HTTPResponse) error
}
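SessionObject provides per-request setup/teardown hooks around the handler: Init runs inside PreContext.Start, Finish runs in Wrap after the handler has returned. A sketch of an implementation that could, for example, manage a database transaction; the transaction handling itself is a placeholder, and attaching it via pctx.WithSession(...) before Start() is an assumption based on the code above:

package api

import (
	"context"

	"github.com/gin-gonic/gin"
	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

type txSession struct {
	// a real implementation would hold the opened transaction here
}

func (s *txSession) Init(g *gin.Context, ctx *ginext.AppContext) error {
	// begin the transaction (placeholder)
	return nil
}

func (s *txSession) Finish(ctx context.Context, resp ginext.HTTPResponse) error {
	if resp.IsSuccess() {
		// commit (placeholder)
		return nil
	}
	// rollback (placeholder)
	return nil
}

// used in a handler via: pctx.WithSession(&txSession{}).Start()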
67  go.mod
@@ -1,32 +1,57 @@
|
|||||||
module gogs.mikescher.com/BlackForestBytes/goext
|
module gogs.mikescher.com/BlackForestBytes/goext
|
||||||
|
|
||||||
go 1.19
|
go 1.21
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/golang/snappy v0.0.4
|
github.com/gin-gonic/gin v1.9.1
|
||||||
github.com/google/go-cmp v0.5.9
|
github.com/glebarez/go-sqlite v1.22.0 // only needed for tests -.-
|
||||||
github.com/jmoiron/sqlx v1.3.5
|
github.com/jmoiron/sqlx v1.3.5
|
||||||
github.com/klauspost/compress v1.16.6
|
github.com/rs/xid v1.5.0
|
||||||
github.com/kr/pretty v0.1.0
|
github.com/rs/zerolog v1.32.0
|
||||||
github.com/montanaflynn/stats v0.7.1
|
go.mongodb.org/mongo-driver v1.14.0
|
||||||
github.com/pkg/errors v0.9.1
|
golang.org/x/crypto v0.19.0
|
||||||
github.com/stretchr/testify v1.8.4
|
golang.org/x/sys v0.17.0
|
||||||
github.com/tidwall/pretty v1.0.0
|
golang.org/x/term v0.17.0
|
||||||
github.com/xdg-go/scram v1.1.2
|
|
||||||
github.com/xdg-go/stringprep v1.0.4
|
|
||||||
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a
|
|
||||||
go.mongodb.org/mongo-driver v1.11.7
|
|
||||||
golang.org/x/crypto v0.10.0
|
|
||||||
golang.org/x/sync v0.3.0
|
|
||||||
golang.org/x/sys v0.9.0
|
|
||||||
golang.org/x/term v0.9.0
|
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
github.com/bytedance/sonic v1.11.0 // indirect
|
||||||
github.com/kr/text v0.1.0 // indirect
|
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
|
||||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
github.com/chenzhuoyu/iasm v0.9.1 // indirect
|
||||||
|
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||||
|
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
|
||||||
|
github.com/gin-contrib/sse v0.1.0 // indirect
|
||||||
|
github.com/go-playground/locales v0.14.1 // indirect
|
||||||
|
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||||
|
github.com/go-playground/validator/v10 v10.18.0 // indirect
|
||||||
|
github.com/goccy/go-json v0.10.2 // indirect
|
||||||
|
github.com/golang/snappy v0.0.4 // indirect
|
||||||
|
github.com/google/uuid v1.5.0 // indirect
|
||||||
|
github.com/json-iterator/go v1.1.12 // indirect
|
||||||
|
github.com/klauspost/compress v1.17.6 // indirect
|
||||||
|
github.com/klauspost/cpuid/v2 v2.2.7 // indirect
|
||||||
|
github.com/leodido/go-urn v1.4.0 // indirect
|
||||||
|
github.com/mattn/go-colorable v0.1.13 // indirect
|
||||||
|
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||||
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||||
|
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||||
|
github.com/montanaflynn/stats v0.7.1 // indirect
|
||||||
|
github.com/pelletier/go-toml/v2 v2.1.1 // indirect
|
||||||
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||||
|
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||||
|
github.com/ugorji/go/codec v1.2.12 // indirect
|
||||||
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
|
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
|
||||||
golang.org/x/text v0.10.0 // indirect
|
github.com/xdg-go/scram v1.1.2 // indirect
|
||||||
|
github.com/xdg-go/stringprep v1.0.4 // indirect
|
||||||
|
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect
|
||||||
|
golang.org/x/arch v0.7.0 // indirect
|
||||||
|
golang.org/x/net v0.21.0 // indirect
|
||||||
|
golang.org/x/sync v0.6.0 // indirect
|
||||||
|
golang.org/x/text v0.14.0 // indirect
|
||||||
|
google.golang.org/protobuf v1.32.0 // indirect
|
||||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||||
|
modernc.org/libc v1.37.6 // indirect
|
||||||
|
modernc.org/mathutil v1.6.0 // indirect
|
||||||
|
modernc.org/memory v1.7.2 // indirect
|
||||||
|
modernc.org/sqlite v1.28.0 // indirect
|
||||||
)
|
)
|
||||||
|
|||||||
180  go.sum
@@ -1,72 +1,168 @@
|
|||||||
|
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
|
||||||
|
github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
|
||||||
|
github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE=
|
||||||
|
github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
|
||||||
|
github.com/bytedance/sonic v1.11.0 h1:FwNNv6Vu4z2Onf1++LNzxB/QhitD8wuTdpZzMTGITWo=
|
||||||
|
github.com/bytedance/sonic v1.11.0/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
|
||||||
|
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
|
||||||
|
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
|
||||||
|
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0=
|
||||||
|
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA=
|
||||||
|
github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
|
||||||
|
github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0=
|
||||||
|
github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
|
||||||
|
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
|
||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||||
|
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||||
|
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
|
||||||
|
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
|
||||||
|
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
|
||||||
|
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
|
||||||
|
github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
|
||||||
|
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
|
||||||
|
github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ=
|
||||||
|
github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc=
|
||||||
|
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
|
||||||
|
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
|
||||||
|
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
|
||||||
|
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
|
||||||
|
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
||||||
|
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||||
|
github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE=
|
||||||
|
github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
|
||||||
|
github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74=
|
||||||
|
github.com/go-playground/validator/v10 v10.17.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
|
||||||
|
github.com/go-playground/validator/v10 v10.18.0 h1:BvolUXjp4zuvkZ5YN5t7ebzbhlUtPsPm2S9NAZ5nl9U=
|
||||||
|
github.com/go-playground/validator/v10 v10.18.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
|
||||||
github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
|
github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
|
||||||
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
||||||
|
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
|
||||||
|
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
|
||||||
|
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||||
|
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||||
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||||
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
|
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
|
||||||
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||||
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||||
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
|
github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
|
||||||
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||||
|
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||||
|
github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ=
|
||||||
|
github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo=
|
||||||
|
github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU=
|
||||||
|
github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
|
github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
|
||||||
github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
|
github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
|
||||||
|
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||||
|
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||||
github.com/klauspost/compress v1.16.6 h1:91SKEy4K37vkp255cJ8QesJhjyRO0hn9i9G0GoUwLsk=
|
github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4=
|
||||||
github.com/klauspost/compress v1.16.6/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
|
github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
|
||||||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI=
|
||||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
|
||||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc=
|
||||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
|
||||||
|
github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
|
||||||
|
github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
|
||||||
|
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
|
||||||
|
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
|
||||||
|
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
|
||||||
|
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
|
||||||
|
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
|
||||||
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
|
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
|
||||||
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
|
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
|
||||||
github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg=
|
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||||
|
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||||
|
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||||
|
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
|
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||||
|
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
|
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
|
||||||
|
github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI=
|
||||||
|
github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
|
||||||
|
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||||
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||||
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||||
|
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||||
|
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
||||||
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
|
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
|
||||||
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
|
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
|
||||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI=
|
||||||
|
github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
|
||||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||||
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||||
|
github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc=
|
||||||
|
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
|
||||||
|
github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A=
|
||||||
|
github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
|
||||||
|
github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0=
|
||||||
|
github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||||
|
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||||
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||||
|
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||||
|
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||||
-github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
-github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
+github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
+github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
+github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
+github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
 github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
 github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
-github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
 github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY=
 github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4=
-github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=
 github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
 github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
 github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
 github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk=
 github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4=
 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-go.mongodb.org/mongo-driver v1.11.7 h1:LIwYxASDLGUg/8wOhgOOZhX8tQa/9tgZPgzZoVqJvcs=
-go.mongodb.org/mongo-driver v1.11.7/go.mod h1:G9TgswdsWjX4tmDA5zfs2+6AEPpYJwqblyjsfuh8oXY=
+go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk=
+go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo=
+go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80=
+go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c=
+golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
+golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc=
+golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
 golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
-golang.org/x/crypto v0.10.0 h1:LKqV2xt9+kDzSTfOhx4FrkEBcMrAgHSYgzywV9zcGmM=
-golang.org/x/crypto v0.10.0/go.mod h1:o4eNf7Ede1fv+hwOwZsTHl9EsPFO6q6ZvYR8vYfY45I=
+golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k=
+golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
+golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
+golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
+golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo=
+golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
 golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
 golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
 golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c=
+golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
+golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo=
+golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
+golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
+golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
-golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
+golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
+golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -74,27 +170,49 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w
 golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s=
-golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
+golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
+golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
-golang.org/x/term v0.9.0 h1:GRRCnKYhdQrD8kfRAdQ6Zcw1P0OcELxGLKJvtjVMZ28=
-golang.org/x/term v0.9.0/go.mod h1:M6DEAAIenWoTxdKrOltXcmDY3rSplQUkrvaDU5FcQyo=
+golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE=
+golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY=
+golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U=
+golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
-golang.org/x/text v0.10.0 h1:UpjohKhiEgNc0CSauXmwYftY1+LlaC75SJwh0SgCX58=
-golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
+golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
+golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU=
+golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
+google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
+google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
-gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+modernc.org/libc v1.37.6 h1:orZH3c5wmhIQFTXF+Nt+eeauyd+ZIt2BX6ARe+kD+aw=
+modernc.org/libc v1.37.6/go.mod h1:YAXkAZ8ktnkCKaN9sw/UDeUVkGYJ/YquGO4FTi5nmHE=
+modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4=
+modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo=
+modernc.org/memory v1.7.2 h1:Klh90S215mmH8c9gO98QxQFsY+W451E8AnzjoE2ee1E=
+modernc.org/memory v1.7.2/go.mod h1:NO4NVCQy0N7ln+T9ngWqOQfi7ley4vpwvARR+Hjw95E=
+modernc.org/sqlite v1.28.0 h1:Zx+LyDDmXczNnEQdvPuEfcFVA2ZPyaD7UCZDjef3BHQ=
+modernc.org/sqlite v1.28.0/go.mod h1:Qxpazz0zH8Z1xCFyi5GSL3FzbtZ3fvbjmywNogldEW0=
+nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
+rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
@@ -1,5 +1,5 @@
 package goext

-const GoextVersion = "0.0.166"
+const GoextVersion = "0.0.391"

-const GoextVersionTimestamp = "2023-06-19T10:25:41+0200"
+const GoextVersionTimestamp = "2024-02-21T16:18:04+0100"
@@ -156,7 +156,6 @@ import (
 // an error.
 func Marshal(v any) ([]byte, error) {
     e := newEncodeState()
-    defer encodeStatePool.Put(e)

     err := e.marshal(v, encOpts{escapeHTML: true})
     if err != nil {
@@ -164,6 +163,8 @@ func Marshal(v any) ([]byte, error) {
     }
     buf := append([]byte(nil), e.Bytes()...)

+    encodeStatePool.Put(e)
+
     return buf, nil
 }

@@ -174,9 +175,9 @@ type IndentOpt struct {

 // MarshalSafeCollections is like Marshal except it will marshal nil maps and
 // slices as '{}' and '[]' respectfully instead of 'null'
-func MarshalSafeCollections(v interface{}, nilSafeSlices bool, nilSafeMaps bool, indent *IndentOpt) ([]byte, error) {
+func MarshalSafeCollections(v interface{}, nilSafeSlices bool, nilSafeMaps bool, indent *IndentOpt, filter *string) ([]byte, error) {
     e := &encodeState{}
-    err := e.marshal(v, encOpts{escapeHTML: true, nilSafeSlices: nilSafeSlices, nilSafeMaps: nilSafeMaps})
+    err := e.marshal(v, encOpts{escapeHTML: true, nilSafeSlices: nilSafeSlices, nilSafeMaps: nilSafeMaps, filter: filter})
     if err != nil {
         return nil, err
     }
@@ -393,6 +394,9 @@ type encOpts struct {
     nilSafeSlices bool
     // nilSafeMaps marshals a nil maps '{}' instead of 'null'
     nilSafeMaps bool
+    // filter matches jsonfilter tag of struct
+    // marshals if no jsonfilter is set or otherwise if jsonfilter has the filter value
+    filter *string
 }

 type encoderFunc func(e *encodeState, v reflect.Value, opts encOpts)
@@ -777,6 +781,8 @@ FieldLoop:

         if f.omitEmpty && isEmptyValue(fv) {
             continue
+        } else if opts.filter != nil && len(f.jsonfilter) > 0 && !f.jsonfilter.Contains(*opts.filter) {
+            continue
         }
         e.WriteByte(next)
         next = ','
@@ -1220,15 +1226,28 @@ type field struct {
     nameNonEsc  string // `"` + name + `":`
     nameEscHTML string // `"` + HTMLEscape(name) + `":`

     tag       bool
     index     []int
     typ       reflect.Type
     omitEmpty bool
-    quoted    bool
+    jsonfilter jsonfilter
+    quoted     bool

     encoder encoderFunc
 }

+// jsonfilter stores the value of the jsonfilter struct tag
+type jsonfilter []string
+
+func (j jsonfilter) Contains(t string) bool {
+    for _, tag := range j {
+        if t == tag {
+            return true
+        }
+    }
+    return false
+}
+
 // byIndex sorts field by index sequence.
 type byIndex []field

@@ -1304,6 +1323,13 @@ func typeFields(t reflect.Type) structFields {
             if !isValidTag(name) {
                 name = ""
             }

+            var jsonfilter []string
+            jsonfilterTag := sf.Tag.Get("jsonfilter")
+            if jsonfilterTag != "" && jsonfilterTag != "-" {
+                jsonfilter = strings.Split(jsonfilterTag, ",")
+            }
+
             index := make([]int, len(f.index)+1)
             copy(index, f.index)
             index[len(f.index)] = i
@@ -1334,12 +1360,13 @@ func typeFields(t reflect.Type) structFields {
                 name = sf.Name
             }
             field := field{
                 name:      name,
                 tag:       tagged,
                 index:     index,
                 typ:       ft,
                 omitEmpty: opts.Contains("omitempty"),
-                quoted:    quoted,
+                jsonfilter: jsonfilter,
+                quoted:     quoted,
             }
             field.nameBytes = []byte(field.name)
             field.equalFold = foldFunc(field.nameBytes)
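Taken together, the `jsonfilter` changes above add a struct tag plus a new `filter` argument: fields that carry a `jsonfilter` tag are only emitted when the passed filter value matches one of the tag's entries, while untagged fields are always emitted. A minimal usage sketch under that reading (the `User` type, its field names and the `gojson` import path/alias are illustrative assumptions, not part of this diff):

```go
package main

import (
	"fmt"

	gojson "gogs.mikescher.com/BlackForestBytes/goext/gojson" // assumed import path of this json fork
)

type User struct {
	ID       string `json:"id"`                          // no jsonfilter tag -> always marshalled
	Email    string `json:"email" jsonfilter:"OWNER"`    // only marshalled when filter == "OWNER"
	Internal string `json:"internal" jsonfilter:"ADMIN"` // only marshalled when filter == "ADMIN"
}

func main() {
	u := User{ID: "1", Email: "x@example.com", Internal: "secret"}

	filter := "OWNER"
	// nil-safe slices/maps enabled, no indentation, fields filtered by "OWNER"
	b, err := gojson.MarshalSafeCollections(u, true, true, nil, &filter)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // {"id":"1","email":"x@example.com"}
}
```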
@@ -1253,6 +1253,10 @@ func TestMarshalSafeCollections(t *testing.T) {
         nilMapStruct struct {
             NilMap map[string]interface{} `json:"nil_map"`
         }
+        testWithFilter struct {
+            Test1 string `json:"test1" jsonfilter:"FILTERONE"`
+            Test2 string `json:"test2" jsonfilter:"FILTERTWO"`
+        }
     )

     tests := []struct {
@@ -1271,10 +1275,12 @@ func TestMarshalSafeCollections(t *testing.T) {
         {map[string]interface{}{"1": 1, "2": 2, "3": 3}, "{\"1\":1,\"2\":2,\"3\":3}"},
         {pNilMap, "null"},
         {nilMapStruct{}, "{\"nil_map\":{}}"},
+        {testWithFilter{}, "{\"test1\":\"\"}"},
     }

+    filter := "FILTERONE"
     for i, tt := range tests {
-        b, err := MarshalSafeCollections(tt.in, true, true, nil)
+        b, err := MarshalSafeCollections(tt.in, true, true, nil, &filter)
         if err != nil {
             t.Errorf("test %d, unexpected failure: %v", i, err)
         }
@@ -97,7 +97,10 @@ func equalFoldRight(s, t []byte) bool {
         t = t[size:]

     }
-    return len(t) == 0
+    if len(t) > 0 {
+        return false
+    }
+    return true
 }

 // asciiEqualFold is a specialization of bytes.EqualFold for use when
@@ -52,7 +52,9 @@ func TestFold(t *testing.T) {
 }

 func TestFoldAgainstUnicode(t *testing.T) {
-    var buf1, buf2 []byte
+    const bufSize = 5
+    buf1 := make([]byte, 0, bufSize)
+    buf2 := make([]byte, 0, bufSize)
     var runes []rune
     for i := 0x20; i <= 0x7f; i++ {
         runes = append(runes, rune(i))
@@ -94,8 +96,12 @@ func TestFoldAgainstUnicode(t *testing.T) {
             continue
         }
         for _, r2 := range runes {
-            buf1 = append(utf8.AppendRune(append(buf1[:0], 'x'), r), 'x')
-            buf2 = append(utf8.AppendRune(append(buf2[:0], 'x'), r2), 'x')
+            buf1 := append(buf1[:0], 'x')
+            buf2 := append(buf2[:0], 'x')
+            buf1 = buf1[:1+utf8.EncodeRune(buf1[1:bufSize], r)]
+            buf2 = buf2[:1+utf8.EncodeRune(buf2[1:bufSize], r2)]
+            buf1 = append(buf1, 'x')
+            buf2 = append(buf2, 'x')
             want := bytes.EqualFold(buf1, buf2)
             if got := ff.fold(buf1, buf2); got != want {
                 t.Errorf("%s(%q, %q) = %v; want %v", ff.name, buf1, buf2, got, want)
@@ -17,6 +17,7 @@ type GoJsonRender struct {
     NilSafeSlices bool
     NilSafeMaps   bool
     Indent        *IndentOpt
+    Filter        *string
 }

 func (r GoJsonRender) Render(w http.ResponseWriter) error {
@@ -25,7 +26,7 @@ func (r GoJsonRender) Render(w http.ResponseWriter) error {
         header["Content-Type"] = []string{"application/json; charset=utf-8"}
     }

-    jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent)
+    jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent, r.Filter)
     if err != nil {
         panic(err)
     }
@@ -36,6 +37,14 @@ func (r GoJsonRender) Render(w http.ResponseWriter) error {
     return nil
 }

+func (r GoJsonRender) RenderString() (string, error) {
+    jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent, r.Filter)
+    if err != nil {
+        panic(err)
+    }
+    return string(jsonBytes), nil
+}
+
 func (r GoJsonRender) WriteContentType(w http.ResponseWriter) {
     header := w.Header()
     if val := header["Content-Type"]; len(val) == 0 {
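A short, hedged sketch of how the extended renderer might be used; only the `Filter` field and `RenderString` come from this change, the handler and DTO below are illustrative assumptions:

```go
package gojson // illustrative: same package as GoJsonRender

import "net/http"

type pingDTO struct {
	Status string `json:"status"`
	Debug  string `json:"debug" jsonfilter:"ADMIN"`
}

// pingHandler is a hypothetical handler that writes a filtered JSON response.
func pingHandler(w http.ResponseWriter, _ *http.Request) {
	filter := "ADMIN"
	r := GoJsonRender{
		Data:          pingDTO{Status: "ok", Debug: "internal details"},
		NilSafeSlices: true,
		NilSafeMaps:   true,
		Indent:        nil,
		Filter:        &filter,
	}
	// Render writes headers and the marshalled body to w;
	// RenderString would instead return the same JSON as a string.
	if err := r.Render(w); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}
```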
@@ -116,3 +116,18 @@ func TestNumberIsValid(t *testing.T) {
         }
     }
 }
+
+func BenchmarkNumberIsValid(b *testing.B) {
+    s := "-61657.61667E+61673"
+    for i := 0; i < b.N; i++ {
+        isValidNumber(s)
+    }
+}
+
+func BenchmarkNumberIsValidRegexp(b *testing.B) {
+    var jsonNumberRegexp = regexp.MustCompile(`^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$`)
+    s := "-61657.61667E+61673"
+    for i := 0; i < b.N; i++ {
+        jsonNumberRegexp.MatchString(s)
+    }
+}
@@ -594,7 +594,7 @@ func (s *scanner) error(c byte, context string) int {
     return scanError
 }

-// quoteChar formats c as a quoted character literal.
+// quoteChar formats c as a quoted character literal
 func quoteChar(c byte) string {
     // special cases - different from quoted strings
     if c == '\'' {
@@ -179,11 +179,9 @@ func nonSpace(b []byte) bool {

 // An Encoder writes JSON values to an output stream.
 type Encoder struct {
     w          io.Writer
     err        error
     escapeHTML bool
-    nilSafeSlices bool
-    nilSafeMaps   bool

     indentBuf    *bytes.Buffer
     indentPrefix string
@@ -204,11 +202,8 @@ func (enc *Encoder) Encode(v any) error {
     if enc.err != nil {
         return enc.err
     }
-
     e := newEncodeState()
-    defer encodeStatePool.Put(e)
-
-    err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML, nilSafeMaps: enc.nilSafeMaps, nilSafeSlices: enc.nilSafeSlices})
+    err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML})
     if err != nil {
         return err
     }
@@ -236,6 +231,7 @@ func (enc *Encoder) Encode(v any) error {
     if _, err = enc.w.Write(b); err != nil {
         enc.err = err
     }
+    encodeStatePool.Put(e)
     return err
 }

@@ -247,13 +243,6 @@ func (enc *Encoder) SetIndent(prefix, indent string) {
     enc.indentValue = indent
 }

-// SetNilSafeCollection specifies whether to represent nil slices and maps as
-// '[]' or '{}' respectfully (flag on) instead of 'null' (default) when marshaling json.
-func (enc *Encoder) SetNilSafeCollection(nilSafeSlices bool, nilSafeMaps bool) {
-    enc.nilSafeSlices = nilSafeSlices
-    enc.nilSafeMaps = nilSafeMaps
-}
-
 // SetEscapeHTML specifies whether problematic HTML characters
 // should be escaped inside JSON quoted strings.
 // The default behavior is to escape &, <, and > to \u0026, \u003c, and \u003e
@@ -12,7 +12,6 @@ import (
     "net/http"
     "net/http/httptest"
     "reflect"
-    "runtime/debug"
     "strings"
     "testing"
 )
@@ -42,7 +41,7 @@ false

 func TestEncoder(t *testing.T) {
     for i := 0; i <= len(streamTest); i++ {
-        var buf strings.Builder
+        var buf bytes.Buffer
         enc := NewEncoder(&buf)
         // Check that enc.SetIndent("", "") turns off indentation.
         enc.SetIndent(">", ".")
@@ -60,43 +59,6 @@ func TestEncoder(t *testing.T) {
     }
 }

-func TestEncoderErrorAndReuseEncodeState(t *testing.T) {
-    // Disable the GC temporarily to prevent encodeState's in Pool being cleaned away during the test.
-    percent := debug.SetGCPercent(-1)
-    defer debug.SetGCPercent(percent)
-
-    // Trigger an error in Marshal with cyclic data.
-    type Dummy struct {
-        Name string
-        Next *Dummy
-    }
-    dummy := Dummy{Name: "Dummy"}
-    dummy.Next = &dummy
-
-    var buf bytes.Buffer
-    enc := NewEncoder(&buf)
-    if err := enc.Encode(dummy); err == nil {
-        t.Errorf("Encode(dummy) == nil; want error")
-    }
-
-    type Data struct {
-        A string
-        I int
-    }
-    data := Data{A: "a", I: 1}
-    if err := enc.Encode(data); err != nil {
-        t.Errorf("Marshal(%v) = %v", data, err)
-    }
-
-    var data2 Data
-    if err := Unmarshal(buf.Bytes(), &data2); err != nil {
-        t.Errorf("Unmarshal(%v) = %v", data2, err)
-    }
-    if data2 != data {
-        t.Errorf("expect: %v, but get: %v", data, data2)
-    }
-}
-
 var streamEncodedIndent = `0.1
 "hello"
 null
@@ -115,7 +77,7 @@ false
 `

 func TestEncoderIndent(t *testing.T) {
-    var buf strings.Builder
+    var buf bytes.Buffer
     enc := NewEncoder(&buf)
     enc.SetIndent(">", ".")
     for _, v := range streamTest {
@@ -185,7 +147,7 @@ func TestEncoderSetEscapeHTML(t *testing.T) {
             `{"bar":"\"<html>foobar</html>\""}`,
         },
     } {
-        var buf strings.Builder
+        var buf bytes.Buffer
         enc := NewEncoder(&buf)
         if err := enc.Encode(tt.v); err != nil {
             t.Errorf("Encode(%s): %s", tt.name, err)
@@ -347,6 +309,21 @@ func TestBlocking(t *testing.T) {
     }
 }

+func BenchmarkEncoderEncode(b *testing.B) {
+    b.ReportAllocs()
+    type T struct {
+        X, Y string
+    }
+    v := &T{"foo", "bar"}
+    b.RunParallel(func(pb *testing.PB) {
+        for pb.Next() {
+            if err := NewEncoder(io.Discard).Encode(v); err != nil {
+                b.Fatal(err)
+            }
+        }
+    })
+}
+
 type tokenStreamCase struct {
     json      string
     expTokens []any
@@ -495,45 +472,3 @@ func TestHTTPDecoding(t *testing.T) {
         t.Errorf("err = %v; want io.EOF", err)
     }
 }
-
-func TestEncoderSetNilSafeCollection(t *testing.T) {
-    var (
-        nilSlice  []interface{}
-        pNilSlice *[]interface{}
-        nilMap    map[string]interface{}
-        pNilMap   *map[string]interface{}
-    )
-    for _, tt := range []struct {
-        name        string
-        v           interface{}
-        want        string
-        rescuedWant string
-    }{
-        {"nilSlice", nilSlice, "null", "[]"},
-        {"nonNilSlice", []interface{}{}, "[]", "[]"},
-        {"sliceWithValues", []interface{}{1, 2, 3}, "[1,2,3]", "[1,2,3]"},
-        {"pNilSlice", pNilSlice, "null", "null"},
-        {"nilMap", nilMap, "null", "{}"},
-        {"nonNilMap", map[string]interface{}{}, "{}", "{}"},
-        {"mapWithValues", map[string]interface{}{"1": 1, "2": 2, "3": 3}, "{\"1\":1,\"2\":2,\"3\":3}", "{\"1\":1,\"2\":2,\"3\":3}"},
-        {"pNilMap", pNilMap, "null", "null"},
-    } {
-        var buf bytes.Buffer
-        enc := NewEncoder(&buf)
-        if err := enc.Encode(tt.v); err != nil {
-            t.Fatalf("Encode(%s): %s", tt.name, err)
-        }
-        if got := strings.TrimSpace(buf.String()); got != tt.want {
-            t.Errorf("Encode(%s) = %#q, want %#q", tt.name, got, tt.want)
-        }
-        buf.Reset()
-        enc.SetNilSafeCollection(true, true)
-        if err := enc.Encode(tt.v); err != nil {
-            t.Fatalf("SetNilSafeCollection(true) Encode(%s): %s", tt.name, err)
-        }
-        if got := strings.TrimSpace(buf.String()); got != tt.rescuedWant {
-            t.Errorf("SetNilSafeCollection(true) Encode(%s) = %#q, want %#q",
-                tt.name, got, tt.want)
-        }
-    }
-}
googleapi/README.md (new file, 54 lines)
@@ -0,0 +1,54 @@
Google OAuth Setup (to send mails)
==================================


- Login @ https://console.cloud.google.com

- Activate the GMail API: https://console.cloud.google.com/apis/library/gmail.googleapis.com?

- Create new Project (aka 'BackendMailAPI') @ https://console.cloud.google.com/projectcreate
  User type: Internal
  Application name: 'BackendMailAPI'
  Support email: ...
  Authorized domains: 'heydyno.de' (or project domain)
  Contact email: ...


- Under "Credentials", create a new OAuth client @ https://console.cloud.google.com/apis/credentials
  Application type: Web
  Name: 'BackendMailOAuth'
  Redirect URI: 'http://localhost/oauth'
  Note the client ID and the client secret

- Open in browser:
  https://accounts.google.com/o/oauth2/v2/auth?redirect_uri=http://localhost/oauth&prompt=consent&response_type=code&client_id={...}&scope=https://www.googleapis.com/auth/gmail.send&access_type=offline
  Note the code from the redirected URI

- Redeem the code via a request (and note the refresh_token):

```
curl --request POST \
  --url https://oauth2.googleapis.com/token \
  --data code={...} \
  --data redirect_uri=http://localhost/oauth \
  --data client_id={...} \
  --data client_secret={...} \
  --data grant_type=authorization_code \
  --data scope=https://www.googleapis.com/auth/gmail.send
```

- Done, with `client_id`, `client_secret` and `refresh_token` the package can be used
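The credentials from the setup above plug directly into the new `googleapi` package added in the files below. A minimal usage sketch (the credential placeholders are illustrative; the addresses reuse the values from the package's tests):

```go
package main

import (
	"context"
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/googleapi"
)

func main() {
	// values obtained via the OAuth setup described in the README (placeholders)
	auth := googleapi.NewGoogleOAuth("<client_id>", "<client_secret>", "<refresh_token>")
	client := googleapi.NewGoogleClient(auth)

	mail, err := client.SendMail(
		context.Background(),
		"noreply@heydyno.de",           // from
		[]string{"trash@mikescher.de"}, // recipients
		nil, nil,                       // cc, bcc
		"Hello Test Mail",
		googleapi.MailBody{Plain: "Plain Text"},
		nil, // no attachments
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(mail.ID)
}
```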
googleapi/attachment.go (new file, 46 lines)
@@ -0,0 +1,46 @@
package googleapi

import (
    "encoding/base64"
    "fmt"
)

type MailAttachment struct {
    IsInline    bool
    ContentType string
    Filename    string
    Data        []byte
}

func (a MailAttachment) dump() []string {
    res := make([]string, 0, 4)

    if a.ContentType != "" {
        res = append(res, "Content-Type: "+a.ContentType+"; charset=UTF-8")
    }

    res = append(res, "Content-Transfer-Encoding: base64")

    if a.IsInline {
        if a.Filename != "" {
            res = append(res, fmt.Sprintf("Content-Disposition: inline;filename=\"%s\"", a.Filename))
        } else {
            res = append(res, "Content-Disposition: inline")
        }
    } else {
        if a.Filename != "" {
            res = append(res, fmt.Sprintf("Content-Disposition: attachment;filename=\"%s\"", a.Filename))
        } else {
            res = append(res, "Content-Disposition: attachment")
        }
    }

    b64 := base64.StdEncoding.EncodeToString(a.Data)
    for i := 0; i < len(b64); i += 80 {
        res = append(res, b64[i:min(i+80, len(b64))])
    }

    res = append(res)

    return res
}
googleapi/body.go (new file, 6 lines)
@@ -0,0 +1,6 @@
package googleapi

type MailBody struct {
    Plain string
    HTML  string
}
googleapi/mimeMessage.go (new file, 224 lines)
@@ -0,0 +1,224 @@
package googleapi

import (
    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "mime"
    "strings"
    "time"
)

// https://datatracker.ietf.org/doc/html/rfc2822
func encodeMimeMail(from string, recipients []string, cc []string, bcc []string, subject string, body MailBody, attachments []MailAttachment) string {

    data := make([]string, 0, 32)

    data = append(data, "Date: "+time.Now().Format(time.RFC1123Z))
    data = append(data, "MIME-Version: 1.0")
    data = append(data, "From: "+mime.QEncoding.Encode("UTF-8", from))
    data = append(data, "To: "+strings.Join(langext.ArrMap(recipients, func(v string) string { return mime.QEncoding.Encode("UTF-8", v) }), ", "))
    if len(cc) > 0 {
        data = append(data, "To: "+strings.Join(langext.ArrMap(cc, func(v string) string { return mime.QEncoding.Encode("UTF-8", v) }), ", "))
    }
    if len(bcc) > 0 {
        data = append(data, "Bcc: "+strings.Join(langext.ArrMap(bcc, func(v string) string { return mime.QEncoding.Encode("UTF-8", v) }), ", "))
    }
    data = append(data, "Subject: "+mime.QEncoding.Encode("UTF-8", subject))

    hasInlineAttachments := langext.ArrAny(attachments, func(v MailAttachment) bool { return v.IsInline })
    hasNormalAttachments := langext.ArrAny(attachments, func(v MailAttachment) bool { return !v.IsInline })
    hasPlain := body.Plain != ""
    hasHTML := body.HTML != ""

    mixedBoundary := langext.MustRawHexUUID()
    relatedBoundary := langext.MustRawHexUUID()
    altBoundary := langext.MustRawHexUUID()

    inlineAttachments := langext.ArrFilter(attachments, func(v MailAttachment) bool { return v.IsInline })
    normalAttachments := langext.ArrFilter(attachments, func(v MailAttachment) bool { return !v.IsInline })

    if hasInlineAttachments && hasNormalAttachments {
        // "mixed+related"

        data = append(data, "Content-Type: multipart/mixed; boundary="+mixedBoundary)
        data = append(data, "")
        data = append(data, "--"+mixedBoundary)

        data = append(data, "Content-Type: multipart/related; boundary="+relatedBoundary)
        data = append(data, "")

        data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, relatedBoundary, altBoundary)...)
        data = append(data, "")

        for i, attachment := range inlineAttachments {
            data = append(data, "--"+relatedBoundary)
            data = append(data, attachment.dump()...)

            if i < len(inlineAttachments)-1 {
                data = append(data, "")
            }
        }

        data = append(data, "--"+relatedBoundary+"--")

        for i, attachment := range normalAttachments {
            data = append(data, "--"+mixedBoundary)
            data = append(data, attachment.dump()...)

            if i < len(normalAttachments)-1 {
                data = append(data, "")
            }
        }

        data = append(data, "--"+mixedBoundary+"--")

    } else if hasNormalAttachments {
        // "mixed"

        data = append(data, "Content-Type: multipart/mixed; boundary="+mixedBoundary)
        data = append(data, "")

        data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, mixedBoundary, altBoundary)...)
        if hasPlain && hasHTML {
            data = append(data, "")
        }

        for i, attachment := range normalAttachments {
            data = append(data, "--"+mixedBoundary)
            data = append(data, attachment.dump()...)

            if i < len(normalAttachments)-1 {
                data = append(data, "")
            }
        }

        data = append(data, "--"+mixedBoundary+"--")

    } else if hasInlineAttachments {
        // "related"

        data = append(data, "Content-Type: multipart/related; boundary="+relatedBoundary)
        data = append(data, "")

        data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, relatedBoundary, altBoundary)...)
        data = append(data, "")

        for i, attachment := range inlineAttachments {
            data = append(data, "--"+relatedBoundary)
            data = append(data, attachment.dump()...)

            if i < len(inlineAttachments)-1 {
                data = append(data, "")
            }
        }

        data = append(data, "--"+relatedBoundary+"--")

    } else if hasPlain && hasHTML {
        // "alternative"

        data = append(data, "Content-Type: multipart/alternative; boundary="+altBoundary)
        data = append(data, "")

        data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, altBoundary, altBoundary)...)
        data = append(data, "")

        data = append(data, "--"+altBoundary+"--")

    } else if hasPlain {
        // "plain"

        data = append(data, "Content-Type: text/plain; charset=UTF-8")
        data = append(data, "Content-Transfer-Encoding: 7bit")
        data = append(data, "")
        data = append(data, body.Plain)

    } else if hasHTML {
        // "plain"

        data = append(data, "Content-Type: text/html; charset=UTF-8")
        data = append(data, "Content-Transfer-Encoding: 7bit")
        data = append(data, "")
        data = append(data, body.HTML)

    } else {
        // "empty??"

    }

    return strings.Join(data, "\r\n")
}

func dumpMailBody(body MailBody, hasInlineAttachments bool, hasNormalAttachments bool, boundary string, boundaryAlt string) []string {

    if body.HTML != "" && body.Plain != "" && !hasInlineAttachments && hasNormalAttachments {
        data := make([]string, 0, 16)
        data = append(data, "--"+boundary)
        data = append(data, "Content-Type: multipart/alternative; boundary="+boundaryAlt)
        data = append(data, "")
        data = append(data, "--"+boundaryAlt)
        data = append(data, "Content-Type: text/plain; charset=UTF-8")
        data = append(data, "Content-Transfer-Encoding: 7bit")
        data = append(data, "")
        data = append(data, body.Plain)
        data = append(data, "")
        data = append(data, "--"+boundaryAlt)
        data = append(data, "Content-Type: text/html; charset=UTF-8")
        data = append(data, "Content-Transfer-Encoding: 7bit")
        data = append(data, "")
        data = append(data, body.HTML)
        data = append(data, "")
        data = append(data, "--"+boundaryAlt+"--")
        return data
    }

    if body.HTML != "" && body.Plain != "" && hasInlineAttachments {
        data := make([]string, 0, 2)
        data = append(data, "--"+boundary)
        data = append(data, body.HTML)
        return data
    }

    if body.HTML != "" && body.Plain != "" {
        data := make([]string, 0, 8)
        data = append(data, "--"+boundary)
        data = append(data, "Content-Type: text/plain; charset=UTF-8")
        data = append(data, "Content-Transfer-Encoding: 7bit")
        data = append(data, "")
        data = append(data, body.Plain)
        data = append(data, "")
        data = append(data, "--"+boundary)
        data = append(data, "Content-Type: text/html; charset=UTF-8")
        data = append(data, "Content-Transfer-Encoding: 7bit")
        data = append(data, "")
        data = append(data, body.HTML)
        return data
    }

    if body.HTML != "" {
        data := make([]string, 0, 2)
        data = append(data, "--"+boundary)
        data = append(data, "Content-Type: text/html; charset=UTF-8")
        data = append(data, "Content-Transfer-Encoding: 7bit")
        data = append(data, "")
        data = append(data, body.HTML)
        return data
    }

    if body.Plain != "" {
        data := make([]string, 0, 2)
        data = append(data, "--"+boundary)
        data = append(data, "Content-Type: text/plain; charset=UTF-8")
        data = append(data, "Content-Transfer-Encoding: 7bit")
        data = append(data, "")
        data = append(data, body.Plain)
        return data
    }

    data := make([]string, 0, 16)
    data = append(data, "--"+boundary)
    data = append(data, "Content-Type: text/plain; charset=UTF-8")
    data = append(data, "Content-Transfer-Encoding: 7bit")
    data = append(data, "")
    data = append(data, "") // no content ?!?
    return data
}
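For orientation (not part of the diff): in the `hasInlineAttachments && hasNormalAttachments` branch above, `encodeMimeMail` produces roughly the following multipart skeleton, where MIXED/RELATED stand for the random hex-UUID boundaries and the attachment parts come from `MailAttachment.dump()` (headers abbreviated):

```
Date: ... / MIME-Version: 1.0 / From: ... / To: ... / Subject: ...
Content-Type: multipart/mixed; boundary=MIXED

--MIXED
Content-Type: multipart/related; boundary=RELATED

--RELATED
<mail body, via dumpMailBody>

--RELATED
<inline attachment: Content-Type, Content-Transfer-Encoding: base64, Content-Disposition: inline, data>
--RELATED--
--MIXED
<normal attachment: Content-Type, Content-Transfer-Encoding: base64, Content-Disposition: attachment, data>
--MIXED--
```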
googleapi/mimeMessage_test.go (new file, 80 lines)
@@ -0,0 +1,80 @@
package googleapi

import (
    "gogs.mikescher.com/BlackForestBytes/goext/tst"
    "os"
    "testing"
)

func TestEncodeMimeMail(t *testing.T) {

    mail := encodeMimeMail(
        "noreply@heydyno.de",
        []string{"trash@mikescher.de"},
        nil,
        nil,
        "Hello Test Mail",
        MailBody{Plain: "Plain Text"},
        nil)

    verifyMime(mail)
}

func TestEncodeMimeMail2(t *testing.T) {

    mail := encodeMimeMail(
        "noreply@heydyno.de",
        []string{"trash@mikescher.de"},
        nil,
        nil,
        "Hello Test Mail (alternative)",
        MailBody{
            Plain: "Plain Text",
            HTML:  "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
        },
        nil)

    verifyMime(mail)
}

func TestEncodeMimeMail3(t *testing.T) {

    mail := encodeMimeMail(
        "noreply@heydyno.de",
        []string{"trash@mikescher.de"},
        nil,
        nil,
        "Hello Test Mail (alternative)",
        MailBody{
            HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
        },
        []MailAttachment{
            {Data: []byte("HelloWorld"), Filename: "test.txt", IsInline: false, ContentType: "text/plain"},
        })

    verifyMime(mail)
}

func TestEncodeMimeMail4(t *testing.T) {

    b := tst.Must(os.ReadFile("test_placeholder.png"))(t)

    mail := encodeMimeMail(
        "noreply@heydyno.de",
        []string{"trash@mikescher.de"},
        nil,
        nil,
        "Hello Test Mail (inline)",
        MailBody{
            HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
        },
        []MailAttachment{
            {Data: b, Filename: "img.png", IsInline: true, ContentType: "image/png"},
        })

    verifyMime(mail)
}

func verifyMime(mail string) {
    //fmt.Printf("%s\n\n", mail)
}
googleapi/oAuth.go (new file, 91 lines)
@@ -0,0 +1,91 @@
package googleapi

import (
    "encoding/json"
    "fmt"
    "gogs.mikescher.com/BlackForestBytes/goext/exerr"
    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "gogs.mikescher.com/BlackForestBytes/goext/timeext"
    "io"
    "net/http"
    "sync"
    "time"
)

type GoogleOAuth interface {
    AccessToken() (string, error)
}

type oauth struct {
    clientID     string
    clientSecret string
    refreshToken string

    lock        sync.RWMutex
    accessToken *string
    expiryDate  *time.Time
}

func NewGoogleOAuth(clientid string, clientsecret, refreshtoken string) GoogleOAuth {
    return &oauth{
        clientID:     clientid,
        clientSecret: clientsecret,
        refreshToken: refreshtoken,
    }
}

func (c *oauth) AccessToken() (string, error) {
    c.lock.RLock()
    if c.accessToken != nil && c.expiryDate != nil && (*c.expiryDate).After(time.Now()) {
        c.lock.RUnlock()
        return *c.accessToken, nil // still valid
    }
    c.lock.RUnlock()

    httpclient := http.Client{}

    url := fmt.Sprintf("https://oauth2.googleapis.com/token?client_id=%s&client_secret=%s&grant_type=%s&refresh_token=%s",
        c.clientID,
        c.clientSecret,
        "refresh_token",
        c.refreshToken)

    req, err := http.NewRequest(http.MethodPost, url, nil)
    if err != nil {
        return "", err
    }

    reqStartTime := time.Now()

    res, err := httpclient.Do(req)

    type response struct {
        AccessToken string `json:"access_token"`
        ExpiresIn   int    `json:"expires_in"`
        Scope       string `json:"scope"`
        TokenType   string `json:"token_type"`
    }

    var r response

    data, err := io.ReadAll(res.Body)
    if err != nil {
        return "", err
    }

    err = json.Unmarshal(data, &r)
    if err != nil {
        return "", err
    }

    if r.ExpiresIn == 0 || r.AccessToken == "" {
        return "", exerr.New(exerr.TypeGoogleResponse, "google oauth returned no response").Str("body", string(data)).Build()
    }

    c.lock.Lock()
    c.expiryDate = langext.Ptr(reqStartTime.Add(timeext.FromSeconds(r.ExpiresIn - 10)))
    c.accessToken = langext.Ptr(r.AccessToken)
    c.lock.Unlock()

    return r.AccessToken, nil
}
googleapi/sendMail.go (new file, 69 lines)
@@ -0,0 +1,69 @@
package googleapi

import (
    "bytes"
    "context"
    "encoding/base64"
    "encoding/json"
    "fmt"
    "gogs.mikescher.com/BlackForestBytes/goext"
    "gogs.mikescher.com/BlackForestBytes/goext/exerr"
    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "io"
    "net/http"
)

type MailRef struct {
    ID       string   `json:"id"`
    ThreadID string   `json:"threadId"`
    LabelIDs []string `json:"labelIds"`
}

func (c *client) SendMail(ctx context.Context, from string, recipients []string, cc []string, bcc []string, subject string, body MailBody, attachments []MailAttachment) (MailRef, error) {

    mm := encodeMimeMail(from, recipients, cc, bcc, subject, body, attachments)

    tok, err := c.oauth.AccessToken()
    if err != nil {
        return MailRef{}, exerr.Wrap(err, "").Build()
    }

    url := fmt.Sprintf("https://gmail.googleapis.com/gmail/v1/users/%s/messages/send?alt=json&prettyPrint=false", "me")

    msgbody, err := json.Marshal(langext.H{"raw": base64.URLEncoding.EncodeToString([]byte(mm))})
    if err != nil {
        return MailRef{}, exerr.Wrap(err, "").Build()
    }

    req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(msgbody))
    if err != nil {
        return MailRef{}, exerr.Wrap(err, "").Build()
    }

    req.Header.Add("Authorization", "Bearer "+tok)
    req.Header.Add("X-Goog-Api-Client", "blackforestbytes-goext/"+goext.GoextVersion)
    req.Header.Add("User-Agent", "blackforestbytes-goext/"+goext.GoextVersion)
    req.Header.Add("Content-Type", "application/json")

    resp, err := c.http.Do(req)
    if err != nil {
        return MailRef{}, exerr.Wrap(err, "").Build()
    }

    respBody, err := io.ReadAll(resp.Body)
    if err != nil {
        return MailRef{}, exerr.Wrap(err, "").Build()
    }

    if resp.StatusCode != 200 {
        return MailRef{}, exerr.New(exerr.TypeGoogleStatuscode, "gmail returned non-200 statuscode").Int("sc", resp.StatusCode).Str("body", string(respBody)).Build()
    }

    var respObj MailRef
    err = json.Unmarshal(respBody, &respObj)
    if err != nil {
        return MailRef{}, exerr.Wrap(err, "").Str("body", string(respBody)).Build()
    }

    return respObj, nil
}
googleapi/sendMail_test.go (new file, 151 lines)
@@ -0,0 +1,151 @@
package googleapi

import (
    "context"
    "fmt"
    "gogs.mikescher.com/BlackForestBytes/goext/exerr"
    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "gogs.mikescher.com/BlackForestBytes/goext/tst"
    "os"
    "testing"
)

func TestMain(m *testing.M) {
    if !exerr.Initialized() {
        exerr.Init(exerr.ErrorPackageConfigInit{ZeroLogErrTraces: langext.PFalse, ZeroLogAllTraces: langext.PFalse})
    }
    os.Exit(m.Run())
}

func TestSendMail1(t *testing.T) {
    t.Skip()
    return

    auth := NewGoogleOAuth(
        "554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com",
        "TODO",
        "TODO")

    ctx := context.Background()

    gclient := NewGoogleClient(auth)

    mail, err := gclient.SendMail(
        ctx,
        "noreply@heydyno.de",
        []string{"trash@mikescher.de"},
        nil,
        nil,
        "Hello Test Mail",
        MailBody{Plain: "Plain Text"},
        nil)

    tst.AssertNoErr(t, err)

    fmt.Printf("mail.ID := %s\n", mail.ID)
    fmt.Printf("mail.ThreadID := %s\n", mail.ThreadID)
    fmt.Printf("mail.LabelIDs := %v\n", mail.LabelIDs)
}

func TestSendMail2(t *testing.T) {
    t.Skip()
    return

    auth := NewGoogleOAuth(
        "554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com",
        "TODO",
        "TODO")

    ctx := context.Background()

    gclient := NewGoogleClient(auth)

    mail, err := gclient.SendMail(
        ctx,
        "noreply@heydyno.de",
        []string{"trash@mikescher.de"},
        nil,
        nil,
        "Hello Test Mail (alternative)",
        MailBody{
            Plain: "Plain Text",
            HTML:  "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
        },
        nil)

    tst.AssertNoErr(t, err)

    fmt.Printf("mail.ID := %s\n", mail.ID)
    fmt.Printf("mail.ThreadID := %s\n", mail.ThreadID)
    fmt.Printf("mail.LabelIDs := %v\n", mail.LabelIDs)
}

func TestSendMail3(t *testing.T) {
    t.Skip()
    return

    auth := NewGoogleOAuth(
        "554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com",
        "TODO",
        "TODO")

    ctx := context.Background()

    gclient := NewGoogleClient(auth)

    mail, err := gclient.SendMail(
        ctx,
        "noreply@heydyno.de",
        []string{"trash@mikescher.de"},
        nil,
        nil,
        "Hello Test Mail (attach)",
        MailBody{
            HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
        },
        []MailAttachment{
            {Data: []byte("HelloWorld"), Filename: "test.txt", IsInline: false, ContentType: "text/plain"},
        })

    tst.AssertNoErr(t, err)

    fmt.Printf("mail.ID := %s\n", mail.ID)
    fmt.Printf("mail.ThreadID := %s\n", mail.ThreadID)
    fmt.Printf("mail.LabelIDs := %v\n", mail.LabelIDs)
}

func TestSendMail4(t *testing.T) {
    t.Skip()
    return

    auth := NewGoogleOAuth(
        "554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com",
        "TODO",
        "TODO")

    ctx := context.Background()

    gclient := NewGoogleClient(auth)

    b := tst.Must(os.ReadFile("test_placeholder.png"))(t)

    mail, err := gclient.SendMail(
        ctx,
        "noreply@heydyno.de",
        []string{"trash@mikescher.de"},
        nil,
        nil,
        "Hello Test Mail (inline)",
        MailBody{
            HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
        },
        []MailAttachment{
            {Data: b, Filename: "img.png", IsInline: true, ContentType: "image/png"},
        })

    tst.AssertNoErr(t, err)

    fmt.Printf("mail.ID := %s\n", mail.ID)
    fmt.Printf("mail.ThreadID := %s\n", mail.ThreadID)
    fmt.Printf("mail.LabelIDs := %v\n", mail.LabelIDs)
}
22	googleapi/service.go	Normal file
@@ -0,0 +1,22 @@
package googleapi

import (
	"context"
	"net/http"
)

type GoogleClient interface {
	SendMail(ctx context.Context, from string, recipients []string, cc []string, bcc []string, subject string, body MailBody, attachments []MailAttachment) (MailRef, error)
}

type client struct {
	oauth GoogleOAuth
	http  http.Client
}

func NewGoogleClient(oauth GoogleOAuth) GoogleClient {
	return &client{
		oauth: oauth,
		http:  http.Client{},
	}
}
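The GoogleClient interface above is the new public surface of the googleapi package; the tests earlier in this changeset call it from inside the package. As a rough, hypothetical sketch of external usage — the import path and the meaning of the second and third NewGoogleOAuth arguments are assumptions, since the tests mask them as "TODO":

```go
package main

import (
	"context"
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/googleapi" // assumed import path
)

func main() {
	// placeholder credentials: the first argument is the OAuth client-id,
	// the other two are masked as "TODO" in the tests above
	auth := googleapi.NewGoogleOAuth("my-client-id", "my-client-secret", "my-refresh-token")

	gclient := googleapi.NewGoogleClient(auth)

	mail, err := gclient.SendMail(
		context.Background(),
		"noreply@example.com",           // from
		[]string{"someone@example.com"}, // recipients
		nil,                             // cc
		nil,                             // bcc
		"Hello",                         // subject
		googleapi.MailBody{Plain: "Hello World"},
		nil) // no attachments
	if err != nil {
		panic(err)
	}

	fmt.Println(mail.ID)
}
```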
BIN	googleapi/test_placeholder.png	Normal file
Binary file not shown. (After: Size: 11 KiB)
@@ -265,6 +265,15 @@ func ArrFirstIndex[T comparable](arr []T, needle T) int {
	return -1
}

func ArrFirstIndexFunc[T any](arr []T, comp func(v T) bool) int {
	for i, v := range arr {
		if comp(v) {
			return i
		}
	}
	return -1
}

func ArrLastIndex[T comparable](arr []T, needle T) int {
	result := -1
	for i, v := range arr {
@@ -275,6 +284,16 @@ func ArrLastIndex[T comparable](arr []T, needle T) int {
	return result
}

func ArrLastIndexFunc[T any](arr []T, comp func(v T) bool) int {
	result := -1
	for i, v := range arr {
		if comp(v) {
			result = i
		}
	}
	return result
}

func AddToSet[T comparable](set []T, add T) []T {
	for _, v := range set {
		if v == add {
@@ -400,7 +419,7 @@ func ArrCastErr[T1 any, T2 any](arr []T1) ([]T2, error) {
		if vcast, ok := any(v).(T2); ok {
			r[i] = vcast
		} else {
-			return nil, errors.New(fmt.Sprintf("Cannot cast element %d of type %T to type %s", i, v, *new(T2)))
+			return nil, errors.New(fmt.Sprintf("Cannot cast element %d of type %T to type %v", i, v, *new(T2)))
		}
	}
	return r, nil
@@ -412,7 +431,7 @@ func ArrCastPanic[T1 any, T2 any](arr []T1) []T2 {
		if vcast, ok := any(v).(T2); ok {
			r[i] = vcast
		} else {
-			panic(fmt.Sprintf("Cannot cast element %d of type %T to type %s", i, v, *new(T2)))
+			panic(fmt.Sprintf("Cannot cast element %d of type %T to type %v", i, v, *new(T2)))
		}
	}
	return r
@@ -440,3 +459,72 @@ func ArrCopy[T any](in []T) []T {
	copy(out, in)
	return out
}

func ArrRemove[T comparable](arr []T, needle T) []T {
	idx := ArrFirstIndex(arr, needle)
	if idx >= 0 {
		return append(arr[:idx], arr[idx+1:]...)
	}
	return arr
}

func ArrExcept[T comparable](arr []T, needles ...T) []T {
	r := make([]T, 0, len(arr))
	rmlist := ArrToSet(needles)
	for _, v := range arr {
		if _, ok := rmlist[v]; !ok {
			r = append(r, v)
		}
	}
	return r
}

func ArrayToInterface[T any](t []T) []interface{} {
	res := make([]interface{}, 0, len(t))
	for i := range t {
		res = append(res, t[i])
	}
	return res
}

func JoinString(arr []string, delimiter string) string {
	str := ""
	for i, v := range arr {
		str += v
		if i < len(arr)-1 {
			str += delimiter
		}
	}

	return str
}

// ArrChunk splits the array into buckets of max-size `chunkSize`,
// the order of elements is kept.
// The last chunk may contain fewer than chunkSize elements.
//
// (chunkSize == -1) means no chunking
//
// see https://www.php.net/manual/en/function.array-chunk.php
func ArrChunk[T any](arr []T, chunkSize int) [][]T {
	if chunkSize == -1 {
		return [][]T{arr}
	}

	res := make([][]T, 0, 1+len(arr)/chunkSize)

	i := 0
	for i < len(arr) {

		right := i + chunkSize
		if right >= len(arr) {
			right = len(arr)
		}

		res = append(res, arr[i:right])

		i = right
	}

	return res
}
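The added slice helpers are plain generic functions; a short sketch (not part of the changeset) of how ArrExcept and ArrChunk combine:

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
	vals := []int{1, 2, 3, 4, 5, 6, 7}

	// drop the needles 2 and 5, keeping the original order
	filtered := langext.ArrExcept(vals, 2, 5) // [1 3 4 6 7]

	// split into buckets of at most 3 elements; the last bucket may be shorter
	chunks := langext.ArrChunk(filtered, 3) // [[1 3 4] [6 7]]

	fmt.Println(filtered, chunks)
}
```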
12	langext/array_test.go	Normal file
@@ -0,0 +1,12 @@
package langext

import (
	"gogs.mikescher.com/BlackForestBytes/goext/tst"
	"testing"
)

func TestJoinString(t *testing.T) {
	ids := []string{"1", "2", "3"}
	res := JoinString(ids, ",")
	tst.AssertEqual(t, res, "1,2,3")
}
@@ -1,6 +1,7 @@
package langext

import (
+	"gogs.mikescher.com/BlackForestBytes/goext/tst"
	"testing"
)

@@ -59,9 +60,3 @@ func TestBase58FlickrDecoding(t *testing.T) {
	tst.AssertEqual(t, _decStr(t, Base58FlickrEncoding, "9aJCVZR"), "Hello")
	tst.AssertEqual(t, _decStr(t, Base58FlickrEncoding, "48638rmBiUzG5NKQoX4KcuE5C8paCFACnE28F7qDx13PRtennAmYSSJQ5gJSRihf5ZDyEQS4UimtihR7uARt4wbty2fW9duTQTM9n1DwUBevreyzGwu6W4YSgrvQgCPDxsiE1mCdZsF8VEBpuHHEiJyw"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in.")
}

-func tst.AssertEqual(t *testing.T, actual string, expected string) {
-	if actual != expected {
-		t.Errorf("values differ: Actual: '%v', Expected: '%v'", actual, expected)
-	}
-}
178	langext/baseAny.go	Normal file
@@ -0,0 +1,178 @@
package langext

import (
	"crypto/rand"
	"errors"
	"math"
	"math/big"
)

type AnyBaseConverter struct {
	base    uint64
	charset []rune
}

func NewAnyBaseConverter(cs string) AnyBaseConverter {
	rcs := []rune(cs)
	return AnyBaseConverter{
		base:    uint64(len(rcs)),
		charset: rcs,
	}
}

func (bc AnyBaseConverter) Rand(rlen int) string {
	biBase := big.NewInt(int64(bc.base))

	randMax := big.NewInt(math.MaxInt64)

	r := ""

	for i := 0; i < rlen; i++ {
		v, err := rand.Int(rand.Reader, randMax)
		if err != nil {
			panic(err)
		}

		r += string(bc.charset[v.Mod(v, biBase).Int64()])
	}

	return r
}

func (bc AnyBaseConverter) EncodeUInt64(num uint64) string {
	if num == 0 {
		return "0"
	}

	b := ""

	// loop as long as num is bigger than zero
	for num > 0 {
		r := num % bc.base

		num -= r
		num /= bc.base

		// prepend the digit so that the most significant digit ends up first
		b = string(bc.charset[int(r)]) + b
	}

	return b
}

func (bc AnyBaseConverter) DecodeUInt64(str string) (uint64, error) {
	if str == "" {
		return 0, errors.New("empty string")
	}

	result := uint64(0)

	for _, v := range str {
		result *= bc.base

		pos := ArrFirstIndex(bc.charset, v)
		if pos == -1 {
			return 0, errors.New("invalid character: " + string(v))
		}

		result += uint64(pos)
	}

	return result, nil
}

func (bc AnyBaseConverter) Encode(src []byte) string {
	value := new(big.Int)
	value.SetBytes(src)
	return bc.EncodeBigInt(value)
}

func (bc AnyBaseConverter) EncodeBigInt(src *big.Int) string {
	value := new(big.Int)
	value.Set(src)

	isneg := value.Sign() < 0

	answer := ""

	if isneg {
		value.Neg(value)
	}

	biBase := big.NewInt(int64(bc.base))

	rem := new(big.Int)

	for value.Sign() > 0 {
		value.QuoRem(value, biBase, rem)
		answer = string(bc.charset[rem.Int64()]) + answer
	}

	if isneg {
		return "-" + answer
	} else {
		return answer
	}
}

func (bc AnyBaseConverter) Decode(src string) ([]byte, error) {
	value, err := bc.DecodeToBigInt(src)
	if err != nil {
		return nil, err
	}
	return value.Bytes(), nil
}

func (bc AnyBaseConverter) DecodeToBigInt(_src string) (*big.Int, error) {
	result := new(big.Int)
	result.SetInt64(0)

	src := []rune(_src)

	if len(src) == 0 {
		return nil, errors.New("string is empty")
	}
	if bc.base < 2 {
		return nil, errors.New("not enough digits")
	}

	i := 0

	sign := new(big.Int)
	sign.SetInt64(1)
	if src[i] == '+' {
		i++
	} else if src[i] == '-' {
		i++
		sign.SetInt64(-1)
	}

	if i >= len(src) {
		return nil, errors.New("no digits in input")
	}

	biBase := big.NewInt(int64(bc.base))

	oldResult := new(big.Int)

	for ; i < len(src); i++ {
		n := ArrFirstIndex(bc.charset, src[i])
		if n < 0 {
			return nil, errors.New("invalid characters in input")
		}

		oldResult.Set(result)

		result.Mul(result, biBase)
		result.Add(result, big.NewInt(int64(n)))

		if result.Cmp(oldResult) < 0 {
			return nil, errors.New("overflow")
		}
	}

	if sign.Cmp(big.NewInt(0)) < 0 {
		result.Neg(result)
	}

	return result, nil
}
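The AnyBaseConverter above works on arbitrary charsets. A small sketch of its intended use (the charset string and the expected "9Ajdvzr" output are taken from the tests that follow):

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
	// base58 (Bitcoin alphabet), the same charset used in the tests below
	bc := langext.NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz")

	enc := bc.Encode([]byte("Hello")) // "9Ajdvzr"
	dec, err := bc.Decode(enc)
	if err != nil {
		panic(err)
	}

	fmt.Println(enc, string(dec)) // 9Ajdvzr Hello
}
```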
80	langext/baseAny_test.go	Normal file
@@ -0,0 +1,80 @@
package langext

import (
	"gogs.mikescher.com/BlackForestBytes/goext/tst"
	"testing"
)

func _anyEncStr(bc AnyBaseConverter, v string) string {
	vr := bc.Encode([]byte(v))
	return vr
}

func _anyDecStr(bc AnyBaseConverter, v string) string {
	vr, err := bc.Decode(v)
	if err != nil {
		panic(err)
	}
	return string(vr)
}

func TestAnyBase58DefaultEncoding(t *testing.T) {
	tst.AssertEqual(t, _anyEncStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "Hello"), "9Ajdvzr")
	tst.AssertEqual(t, _anyEncStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in."), "48638SMcJuah5okqPx4kCVf5d8QAdgbdNf28g7ReY13prUENNbMyssjq5GjsrJHF5zeZfqs4uJMUJHr7VbrU4XBUZ2Fw9DVtqtn9N1eXucEWSEZahXV6w4ysGSWqGdpeYTJf1MdDzTg8vfcQViifJjZX")
}

func TestAnyBase58DefaultDecoding(t *testing.T) {
	tst.AssertEqual(t, _anyDecStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "9Ajdvzr"), "Hello")
	tst.AssertEqual(t, _anyDecStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "48638SMcJuah5okqPx4kCVf5d8QAdgbdNf28g7ReY13prUENNbMyssjq5GjsrJHF5zeZfqs4uJMUJHr7VbrU4XBUZ2Fw9DVtqtn9N1eXucEWSEZahXV6w4ysGSWqGdpeYTJf1MdDzTg8vfcQViifJjZX"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in.")
}

func TestAnyBaseDecode(t *testing.T) {

	const (
		Binary  = "01"
		Decimal = "0123456789"
		Hex     = "0123456789ABCDEF"
		DNA     = "ACGT"
		Base32  = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567"
		Base58  = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
		Base62  = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
		Base64  = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
Base256 = "🚀🪐☄🛰🌌🌑🌒🌓🌔🌕🌖🌗🌘🌍🌏🌎🐉☀💻🖥💾💿😂❤😍🤣😊🙏💕😭😘👍😅👏😁🔥🥰💔💖💙😢🤔😆🙄💪😉☺👌🤗💜😔😎😇🌹🤦🎉💞✌✨🤷😱😌🌸🙌😋💗💚😏💛🙂💓🤩😄😀🖤😃💯🙈👇🎶😒🤭❣😜💋👀😪😑💥🙋😞😩😡🤪👊🥳😥🤤👉💃😳✋😚😝😴🌟😬🙃🍀🌷😻😓⭐✅🥺🌈😈🤘💦✔😣🏃💐☹🎊💘😠☝😕🌺🎂🌻😐🖕💝🙊😹🗣💫💀👑🎵🤞😛🔴😤🌼😫⚽🤙☕🏆🤫👈😮🙆🍻🍃🐶💁😲🌿🧡🎁⚡🌞🎈❌✊👋😰🤨😶🤝🚶💰🍓💢🤟🙁🚨💨🤬✈🎀🍺🤓😙💟🌱😖👶🥴▶➡❓💎💸⬇😨🌚🦋😷🕺⚠🙅😟😵👎🤲🤠🤧📌🔵💅🧐🐾🍒😗🤑🌊🤯🐷☎💧😯💆👆🎤🙇🍑❄🌴💣🐸💌📍🥀🤢👅💡💩👐📸👻🤐🤮🎼🥵🚩🍎🍊👼💍📣🥂"
	)

	type TestDef struct {
		FromCS  string
		FromVal string
		ToCS    string
		ToVal   string
	}

	defs := []TestDef{
		{Binary, "10100101011100000101010", Decimal, "5421098"},
		{Decimal, "5421098", DNA, "CCAGGTGAAGGG"},
		{Decimal, "5421098", DNA, "CCAGGTGAAGGG"},
		{Decimal, "80085", Base256, "🪐💞🔵"},
		{Hex, "48656C6C6C20576F526C5421", Base64, "SGVsbGwgV29SbFQh"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base32, "CIMVWGY3B7QFO32SNRPZBB"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base58, "2fUsGKQUcgQcwSqpvy6"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base62, "V34nvybdQ3m3RHk9Sr"},
	}

	for _, def := range defs {

		d1 := NewAnyBaseConverter(def.FromCS)
		d2 := NewAnyBaseConverter(def.ToCS)

		v1 := tst.Must(d1.Decode(def.FromVal))(t)
		v2 := tst.Must(d2.Decode(def.ToVal))(t)

		tst.AssertArrayEqual(t, v1, v2)

		str2 := d2.Encode(v1)
		tst.AssertEqual(t, str2, def.ToVal)

		str1 := d1.Encode(v2)
		tst.AssertEqual(t, str1, def.FromVal)

	}
}
@@ -29,6 +29,22 @@ func ArrToMap[T comparable, V any](a []V, keyfunc func(V) T) map[T]V {
	return result
}

func ArrToKVMap[T any, K comparable, V any](a []T, keyfunc func(T) K, valfunc func(T) V) map[K]V {
	result := make(map[K]V, len(a))
	for _, v := range a {
		result[keyfunc(v)] = valfunc(v)
	}
	return result
}

func ArrToSet[T comparable](a []T) map[T]bool {
	result := make(map[T]bool, len(a))
	for _, v := range a {
		result[v] = true
	}
	return result
}

func MapToArr[T comparable, V any](v map[T]V) []MapEntry[T, V] {
	result := make([]MapEntry[T, V], 0, len(v))
	for mk, mv := range v {
21	langext/must.go	Normal file
@@ -0,0 +1,21 @@
package langext

// Must returns a value and panics on error
//
// Usage: Must(methodWithError(...))
func Must[T any](v T, err error) T {
	if err != nil {
		panic(err)
	}
	return v
}

// MustBool returns a value and panics on missing
//
// Usage: MustBool(methodWithOkayReturn(...))
func MustBool[T any](v T, ok bool) T {
	if !ok {
		panic("not ok")
	}
	return v
}
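Must and MustBool trade an explicit error check for a panic, which keeps one-liners in tests and initialization code short. A hypothetical sketch (strconv and the type assertion are only used for illustration):

```go
package main

import (
	"fmt"
	"strconv"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
	// Must: panics if strconv.Atoi returns a non-nil error
	port := langext.Must(strconv.Atoi("8080"))

	// MustBool: panics if the ok-flag of a comma-ok expression is false
	var anyVal interface{} = "hello"
	str, ok := anyVal.(string)
	val := langext.MustBool(str, ok)

	fmt.Println(port, val)
}
```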
@@ -1,7 +1,10 @@
package langext

+import "runtime/debug"

type PanicWrappedErr struct {
	panic any
+	Stack string
}

func (p PanicWrappedErr) Error() string {
@@ -15,7 +18,7 @@ func (p PanicWrappedErr) ReoveredObj() any {
func RunPanicSafe(fn func()) (err error) {
	defer func() {
		if rec := recover(); rec != nil {
-			err = PanicWrappedErr{panic: rec}
+			err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
		}
	}()

@@ -27,7 +30,7 @@ func RunPanicSafe(fn func()) (err error) {
func RunPanicSafeR1(fn func() error) (err error) {
	defer func() {
		if rec := recover(); rec != nil {
-			err = PanicWrappedErr{panic: rec}
+			err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
		}
	}()

@@ -38,7 +41,7 @@ func RunPanicSafeR2[T1 any](fn func() (T1, error)) (r1 T1, err error) {
	defer func() {
		if rec := recover(); rec != nil {
			r1 = *new(T1)
-			err = PanicWrappedErr{panic: rec}
+			err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
		}
	}()

@@ -50,7 +53,7 @@ func RunPanicSafeR3[T1 any, T2 any](fn func() (T1, T2, error)) (r1 T1, r2 T2, er
		if rec := recover(); rec != nil {
			r1 = *new(T1)
			r2 = *new(T2)
-			err = PanicWrappedErr{panic: rec}
+			err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
		}
	}()

@@ -63,7 +66,7 @@ func RunPanicSafeR4[T1 any, T2 any, T3 any](fn func() (T1, T2, T3, error)) (r1 T
			r1 = *new(T1)
			r2 = *new(T2)
			r3 = *new(T3)
-			err = PanicWrappedErr{panic: rec}
+			err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
		}
	}()

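With the Stack field now populated from debug.Stack() at recover time, callers of the RunPanicSafe* helpers can log where a recovered panic came from. A small sketch (the value type assertion reflects that PanicWrappedErr is returned by value in the code above):

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
	err := langext.RunPanicSafe(func() {
		panic("boom") // the recovered value ends up inside PanicWrappedErr
	})

	if pwe, ok := err.(langext.PanicWrappedErr); ok {
		fmt.Println("recovered:", pwe.Error())
		fmt.Println(pwe.Stack) // stack trace captured via debug.Stack() in the deferred recover
	}
}
```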
@@ -10,10 +10,23 @@ var PTrue = Ptr(true)
// PFalse := &false
var PFalse = Ptr(false)

// PNil := &nil
var PNil = Ptr[any](nil)

func Ptr[T any](v T) *T {
	return &v
}

func DblPtr[T any](v T) **T {
	v_ := &v
	return &v_
}

func DblPtrNil[T any]() **T {
	var v *T = nil
	return &v
}

func PtrInt32(v int32) *int32 {
	return &v
}
@@ -35,7 +48,7 @@ func IsNil(i interface{}) bool {
		return true
	}
	switch reflect.TypeOf(i).Kind() {
-	case reflect.Ptr, reflect.Map, reflect.Array, reflect.Chan, reflect.Slice:
+	case reflect.Ptr, reflect.Map, reflect.Chan, reflect.Slice, reflect.Func, reflect.UnsafePointer:
		return reflect.ValueOf(i).IsNil()
	}
	return false
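The widened switch matters because a typed nil stored in an interface does not compare equal to nil; reflect.Array was dropped (an array value can never be nil) while Func and UnsafePointer are now checked too. A quick illustration (not part of the diff):

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
	var p *int = nil
	var i interface{} = p

	fmt.Println(i == nil)         // false – the interface carries a (*int, nil) pair
	fmt.Println(langext.IsNil(i)) // true  – reflect sees the nil pointer inside

	var f func() = nil
	fmt.Println(langext.IsNil(f)) // true  – reflect.Func is now covered as well
}
```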
29	langext/url.go	Normal file
@@ -0,0 +1,29 @@
package langext

import (
	"fmt"
	"strings"
)

func BuildUrl(url, path string, params *map[string]string) string {
	if path[:1] == "/" && url[len(url)-1:] == "/" {
		url += path[1:]
	} else if path[:1] != "/" && url[len(url)-1:] != "/" {
		url += "/" + path
	} else {
		url += path
	}

	if params == nil {
		return url
	}

	for key, value := range *params {
		if strings.Contains(url, "?") {
			url += fmt.Sprintf("&%s=%s", key, value)
		} else {
			url += fmt.Sprintf("?%s=%s", key, value)
		}
	}
	return url
}
45	langext/url_test.go	Normal file
@@ -0,0 +1,45 @@
package langext

import (
	"gogs.mikescher.com/BlackForestBytes/goext/tst"
	"testing"
)

func TestBuildUrl(t *testing.T) {
	tests := []struct {
		Url    string
		Path   string
		Params *map[string]string
		Want   string
	}{
		{
			Url:    "https://test.heydyno.de/",
			Path:   "/testing-01",
			Params: &map[string]string{"param1": "value1"},
			Want:   "https://test.heydyno.de/testing-01?param1=value1",
		},
		{
			Url:    "https://test.heydyno.de",
			Path:   "testing-01",
			Params: &map[string]string{"param1": "value1"},
			Want:   "https://test.heydyno.de/testing-01?param1=value1",
		},
		{
			Url:    "https://test.heydyno.de",
			Path:   "/testing-01",
			Params: nil,
			Want:   "https://test.heydyno.de/testing-01",
		},
		{
			Url:    "https://test.heydyno.de/",
			Path:   "testing-01",
			Params: nil,
			Want:   "https://test.heydyno.de/testing-01",
		},
	}

	for _, test := range tests {
		res := BuildUrl(test.Url, test.Path, test.Params)
		tst.AssertEqual(t, res, test.Want)
	}
}
@@ -1,16 +0,0 @@
(go.mongodb.org/mongo-driver/x/mongo/driver.Connection).Close
(*go.mongodb.org/mongo-driver/x/network/connection.connection).Close
(go.mongodb.org/mongo-driver/x/network/connection.Connection).Close
(*go.mongodb.org/mongo-driver/x/mongo/driver/topology.connection).close
(*go.mongodb.org/mongo-driver/x/mongo/driver/topology.Topology).Unsubscribe
(*go.mongodb.org/mongo-driver/x/mongo/driver/topology.Server).Close
(*go.mongodb.org/mongo-driver/x/network/connection.pool).closeConnection
(*go.mongodb.org/mongo-driver/x/mongo/driver/topology.pool).close
(go.mongodb.org/mongo-driver/x/network/wiremessage.ReadWriteCloser).Close
(*go.mongodb.org/mongo-driver/mongo.Cursor).Close
(*go.mongodb.org/mongo-driver/mongo.ChangeStream).Close
(*go.mongodb.org/mongo-driver/mongo.Client).Disconnect
(net.Conn).Close
encoding/pem.Encode
fmt.Fprintf
fmt.Fprint
13	mongo/.gitignore	vendored
@@ -1,13 +0,0 @@
.vscode
debug
.idea
*.iml
*.ipr
*.iws
.idea
*.sublime-project
*.sublime-workspace
driver-test-data.tar.gz
perf
**mongocryptd.pid
*.test
3	mongo/.gitmodules	vendored
@@ -1,3 +0,0 @@
[submodule "specifications"]
	path = specifications
	url = git@github.com:mongodb/specifications.git
@@ -1,123 +0,0 @@
run:
  timeout: 5m

linters:
  disable-all: true
  # TODO(GODRIVER-2156): Enable all commented-out linters.
  enable:
    - errcheck
#    - errorlint
    - gocritic
    - goimports
    - gosimple
    - gosec
    - govet
    - ineffassign
    - makezero
    - misspell
    - nakedret
    - paralleltest
    - prealloc
    - revive
    - staticcheck
    - typecheck
    - unused
    - unconvert
    - unparam

linters-settings:
  errcheck:
    exclude: .errcheck-excludes
  gocritic:
    enabled-checks:
      # Detects suspicious append result assignments. E.g. "b := append(a, 1, 2, 3)"
      - appendAssign
  govet:
    disable:
      - cgocall
      - composites
  paralleltest:
    # Ignore missing calls to `t.Parallel()` and only report incorrect uses of `t.Parallel()`.
    ignore-missing: true
  staticcheck:
    checks: [
      "all",
      "-SA1019", # Disable deprecation warnings for now.
      "-SA1012", # Disable "do not pass a nil Context" to allow testing nil contexts in tests.
    ]

issues:
  exclude-use-default: false
  exclude:
    # Add all default excluded issues except issues related to exported types/functions not having
    # comments; we want those warnings. The defaults are copied from the "--exclude-use-default"
    # documentation on https://golangci-lint.run/usage/configuration/#command-line-options
    ## Defaults ##
    # EXC0001 errcheck: Almost all programs ignore errors on these functions and in most cases it's ok
    - Error return value of .((os\.)?std(out|err)\..*|.*Close|.*Flush|os\.Remove(All)?|.*print(f|ln)?|os\.(Un)?Setenv). is not checked
    # EXC0003 golint: False positive when tests are defined in package 'test'
    - func name will be used as test\.Test.* by other packages, and that stutters; consider calling this
    # EXC0004 govet: Common false positives
    - (possible misuse of unsafe.Pointer|should have signature)
    # EXC0005 staticcheck: Developers tend to write in C-style with an explicit 'break' in a 'switch', so it's ok to ignore
    - ineffective break statement. Did you mean to break out of the outer loop
    # EXC0006 gosec: Too many false-positives on 'unsafe' usage
    - Use of unsafe calls should be audited
    # EXC0007 gosec: Too many false-positives for parametrized shell calls
    - Subprocess launch(ed with variable|ing should be audited)
    # EXC0008 gosec: Duplicated errcheck checks
    - (G104|G307)
    # EXC0009 gosec: Too many issues in popular repos
    - (Expect directory permissions to be 0750 or less|Expect file permissions to be 0600 or less)
    # EXC0010 gosec: False positive is triggered by 'src, err := ioutil.ReadFile(filename)'
    - Potential file inclusion via variable
    ## End Defaults ##

    # Ignore capitalization warning for this weird field name.
    - "var-naming: struct field CqCssWxW should be CqCSSWxW"
    # Ignore warnings for common "wiremessage.Read..." usage because the safest way to use that API
    # is by assigning possibly unused returned byte buffers.
    - "SA4006: this value of `wm` is never used"
    - "SA4006: this value of `rem` is never used"
    - "ineffectual assignment to wm"
    - "ineffectual assignment to rem"

  skip-dirs-use-default: false
  skip-dirs:
    - (^|/)vendor($|/)
    - (^|/)testdata($|/)
    - (^|/)etc($|/)
  exclude-rules:
    # Ignore some linters for example code that is intentionally simplified.
    - path: examples/
      linters:
        - revive
        - errcheck
    # Disable unused code linters for the copy/pasted "awsv4" package.
    - path: x/mongo/driver/auth/internal/awsv4
      linters:
        - unused
    # Disable "unused" linter for code files that depend on the "mongocrypt.MongoCrypt" type because
    # the linter build doesn't work correctly with CGO enabled. As a result, all calls to a
    # "mongocrypt.MongoCrypt" API appear to always panic (see mongocrypt_not_enabled.go), leading
    # to confusing messages about unused code.
    - path: x/mongo/driver/crypt.go|mongo/(crypt_retrievers|mongocryptd).go
      linters:
        - unused
    # Ignore "TLS MinVersion too low", "TLS InsecureSkipVerify set true", and "Use of weak random
    # number generator (math/rand instead of crypto/rand)" in tests.
    - path: _test\.go
      text: G401|G402|G404
      linters:
        - gosec
    # Ignore missing comments for exported variable/function/type for code in the "internal" and
    # "benchmark" directories.
    - path: (internal\/|benchmark\/)
      text: exported (.+) should have comment( \(or a comment on this block\))? or be unexported
    # Ignore missing package comments for directories that aren't frequently used by external users.
    - path: (internal\/|benchmark\/|x\/|cmd\/|mongo\/integration\/)
      text: should have a package comment
    # Disable unused linter for "golang.org/x/exp/rand" package in internal/randutil/rand.
    - path: internal/randutil/rand
      linters:
        - unused
201	mongo/LICENSE
@@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
210
mongo/Makefile
210
mongo/Makefile
@@ -1,210 +0,0 @@
|
|||||||
ATLAS_URIS = "$(ATLAS_FREE)" "$(ATLAS_REPLSET)" "$(ATLAS_SHARD)" "$(ATLAS_TLS11)" "$(ATLAS_TLS12)" "$(ATLAS_FREE_SRV)" "$(ATLAS_REPLSET_SRV)" "$(ATLAS_SHARD_SRV)" "$(ATLAS_TLS11_SRV)" "$(ATLAS_TLS12_SRV)" "$(ATLAS_SERVERLESS)" "$(ATLAS_SERVERLESS_SRV)"
|
|
||||||
TEST_TIMEOUT = 1800
|
|
||||||
|
|
||||||
### Utility targets. ###
|
|
||||||
.PHONY: default
|
|
||||||
default: build check-license check-fmt check-modules lint test-short
|
|
||||||
|
|
||||||
.PHONY: add-license
|
|
||||||
add-license:
|
|
||||||
etc/check_license.sh -a
|
|
||||||
|
|
||||||
.PHONY: check-license
|
|
||||||
check-license:
|
|
||||||
etc/check_license.sh
|
|
||||||
|
|
||||||
.PHONY: build
|
|
||||||
build: cross-compile build-tests build-compile-check
|
|
||||||
go build ./...
|
|
||||||
go build $(BUILD_TAGS) ./...
|
|
||||||
|
|
||||||
# Use ^$ to match no tests so that no tests are actually run but all tests are
|
|
||||||
# compiled. Run with -short to ensure none of the TestMain functions try to
|
|
||||||
# connect to a server.
|
|
||||||
.PHONY: build-tests
|
|
||||||
build-tests:
|
|
||||||
go test -short $(BUILD_TAGS) -run ^$$ ./...
|
|
||||||
|
|
||||||
.PHONY: build-compile-check
|
|
||||||
build-compile-check:
|
|
||||||
etc/compile_check.sh
|
|
||||||
|
|
||||||
# Cross-compiling on Linux for architectures 386, arm, arm64, amd64, ppc64le, and s390x.
|
|
||||||
# Omit any build tags because we don't expect our build environment to support compiling the C
|
|
||||||
# libraries for other architectures.
|
|
||||||
.PHONY: cross-compile
|
|
||||||
cross-compile:
|
|
||||||
GOOS=linux GOARCH=386 go build ./...
|
|
||||||
GOOS=linux GOARCH=arm go build ./...
|
|
||||||
GOOS=linux GOARCH=arm64 go build ./...
|
|
||||||
GOOS=linux GOARCH=amd64 go build ./...
|
|
||||||
GOOS=linux GOARCH=ppc64le go build ./...
|
|
||||||
GOOS=linux GOARCH=s390x go build ./...
|
|
||||||
|
|
||||||
.PHONY: install-lll
|
|
||||||
install-lll:
|
|
||||||
go install github.com/walle/lll/...@latest
|
|
||||||
|
|
||||||
.PHONY: check-fmt
|
|
||||||
check-fmt: install-lll
|
|
||||||
etc/check_fmt.sh
|
|
||||||
|
|
||||||
# check-modules runs "go mod tidy" then "go mod vendor" and exits with a non-zero exit code if there
|
|
||||||
# are any module or vendored modules changes. The intent is to confirm two properties:
|
|
||||||
#
|
|
||||||
# 1. Exactly the required modules are declared as dependencies. We should always be able to run
|
|
||||||
# "go mod tidy" and expect that no unrelated changes are made to the "go.mod" file.
|
|
||||||
#
|
|
||||||
# 2. All required modules are copied into the vendor/ directory and are an exact copy of the
|
|
||||||
# original module source code (i.e. the vendored modules are not modified from their original code).
|
|
||||||
.PHONY: check-modules
|
|
||||||
check-modules:
|
|
||||||
go mod tidy -v
|
|
||||||
go mod vendor
|
|
||||||
git diff --exit-code go.mod go.sum ./vendor
|
|
||||||
|
|
||||||
.PHONY: doc
|
|
||||||
doc:
|
|
||||||
godoc -http=:6060 -index
|
|
||||||
|
|
||||||
.PHONY: fmt
|
|
||||||
fmt:
|
|
||||||
go fmt ./...
|
|
||||||
|
|
||||||
.PHONY: install-golangci-lint
|
|
||||||
install-golangci-lint:
|
|
||||||
go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.51.0
|
|
||||||
|
|
||||||
# Lint with various GOOS and GOARCH targets to catch static analysis failures that may only affect
|
|
||||||
# specific operating systems or architectures. For example, staticcheck will only check for 64-bit
|
|
||||||
# alignment of atomically accessed variables on 32-bit architectures (see
|
|
||||||
# https://staticcheck.io/docs/checks#SA1027)
|
|
||||||
.PHONY: lint
|
|
||||||
lint: install-golangci-lint
|
|
||||||
GOOS=linux GOARCH=386 golangci-lint run --config .golangci.yml ./...
|
|
||||||
GOOS=linux GOARCH=arm golangci-lint run --config .golangci.yml ./...
|
|
||||||
GOOS=linux GOARCH=arm64 golangci-lint run --config .golangci.yml ./...
|
|
||||||
GOOS=linux GOARCH=amd64 golangci-lint run --config .golangci.yml ./...
|
|
||||||
GOOS=linux GOARCH=ppc64le golangci-lint run --config .golangci.yml ./...
|
|
||||||
GOOS=linux GOARCH=s390x golangci-lint run --config .golangci.yml ./...
|
|
||||||
|
|
||||||
.PHONY: update-notices
|
|
||||||
update-notices:
|
|
||||||
etc/generate_notices.pl > THIRD-PARTY-NOTICES
|
|
||||||
|
|
||||||
### Local testing targets. ###
|
|
||||||
.PHONY: test
|
|
||||||
test:
|
|
||||||
go test $(BUILD_TAGS) -timeout $(TEST_TIMEOUT)s -p 1 ./...
|
|
||||||
|
|
||||||
.PHONY: test-cover
|
|
||||||
test-cover:
|
|
||||||
go test $(BUILD_TAGS) -timeout $(TEST_TIMEOUT)s -cover $(COVER_ARGS) -p 1 ./...
|
|
||||||
|
|
||||||
.PHONY: test-race
|
|
||||||
test-race:
|
|
||||||
go test $(BUILD_TAGS) -timeout $(TEST_TIMEOUT)s -race -p 1 ./...
|
|
||||||
|
|
||||||
.PHONY: test-short
|
|
||||||
test-short:
|
|
||||||
go test $(BUILD_TAGS) -timeout 60s -short ./...
|
|
||||||
|
|
||||||
### Evergreen specific targets. ###
|
|
||||||
.PHONY: build-aws-ecs-test
|
|
||||||
build-aws-ecs-test:
|
|
||||||
go build $(BUILD_TAGS) ./cmd/testaws/main.go
|
|
||||||
|
|
||||||
.PHONY: evg-test
|
|
||||||
evg-test:
|
|
||||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s -p 1 ./... >> test.suite
|
|
||||||
|
|
||||||
.PHONY: evg-test-atlas
|
|
||||||
evg-test-atlas:
|
|
||||||
go run ./cmd/testatlas/main.go $(ATLAS_URIS)
|
|
||||||
|
|
||||||
.PHONY: evg-test-atlas-data-lake
|
|
||||||
evg-test-atlas-data-lake:
|
|
||||||
ATLAS_DATA_LAKE_INTEGRATION_TEST=true go test -v ./mongo/integration -run TestUnifiedSpecs/atlas-data-lake-testing >> spec_test.suite
|
|
||||||
ATLAS_DATA_LAKE_INTEGRATION_TEST=true go test -v ./mongo/integration -run TestAtlasDataLake >> spec_test.suite
|
|
||||||
|
|
||||||
.PHONY: evg-test-enterprise-auth
|
|
||||||
evg-test-enterprise-auth:
|
|
||||||
go run -tags gssapi ./cmd/testentauth/main.go
|
|
||||||
|
|
||||||
.PHONY: evg-test-kmip
|
|
||||||
evg-test-kmip:
|
|
||||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionSpec/kmipKMS >> test.suite
|
|
||||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionProse/data_key_and_double_encryption >> test.suite
|
|
||||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionProse/corpus >> test.suite
|
|
||||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionProse/custom_endpoint >> test.suite
|
|
||||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionProse/kms_tls_options_test >> test.suite
|
|
||||||
|
|
||||||
.PHONY: evg-test-kms
|
|
||||||
evg-test-kms:
|
|
||||||
go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionProse/kms_tls_tests >> test.suite
|
|
||||||
|
|
||||||
.PHONY: evg-test-load-balancers
|
|
||||||
evg-test-load-balancers:
|
|
||||||
# Load balancer should be tested with all unified tests as well as tests in the following
|
|
||||||
# components: retryable reads, retryable writes, change streams, initial DNS seedlist discovery.
|
|
||||||
go test $(BUILD_TAGS) ./mongo/integration -run TestUnifiedSpecs/retryable-reads -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
|
||||||
go test $(BUILD_TAGS) ./mongo/integration -run TestRetryableWritesSpec -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
|
||||||
go test $(BUILD_TAGS) ./mongo/integration -run TestChangeStreamSpec -v -timeout $(TEST_TIMEOUT)s >> test.suite
|
|
||||||
	go test $(BUILD_TAGS) ./mongo/integration -run TestInitialDNSSeedlistDiscoverySpec/load_balanced -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestLoadBalancerSupport -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration/unified -run TestUnifiedSpec -v -timeout $(TEST_TIMEOUT)s >> test.suite

.PHONY: evg-test-ocsp
evg-test-ocsp:
	go test -v ./mongo -run TestOCSP $(OCSP_TLS_SHOULD_SUCCEED) >> test.suite

.PHONY: evg-test-serverless
evg-test-serverless:
	# Serverless should be tested with all unified tests as well as tests in the following components: CRUD, load balancer,
	# retryable reads, retryable writes, sessions, transactions and cursor behavior.
	go test $(BUILD_TAGS) ./mongo/integration -run TestCrudSpec -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestWriteErrorsWithLabels -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestWriteErrorsDetails -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestHintErrors -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestWriteConcernError -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestErrorsCodeNamePropagated -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestLoadBalancerSupport -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestUnifiedSpecs/retryable-reads -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestRetryableReadsProse -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestRetryableWritesSpec -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestRetryableWritesProse -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestUnifiedSpecs/sessions -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestSessionsProse -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestUnifiedSpecs/transactions/legacy -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestConvenientTransactions -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration -run TestCursor -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test $(BUILD_TAGS) ./mongo/integration/unified -run TestUnifiedSpec -v -timeout $(TEST_TIMEOUT)s >> test.suite
	go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionSpec >> test.suite
	go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration -run TestClientSideEncryptionProse >> test.suite

.PHONY: evg-test-versioned-api
evg-test-versioned-api:
	# Versioned API related tests are in the mongo, integration and unified packages.
	go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo >> test.suite
	go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration >> test.suite
	go test -exec "env PKG_CONFIG_PATH=$(PKG_CONFIG_PATH) LD_LIBRARY_PATH=$(LD_LIBRARY_PATH)" $(BUILD_TAGS) -v -timeout $(TEST_TIMEOUT)s ./mongo/integration/unified >> test.suite

.PHONY: build-gcpkms-test
build-gcpkms-test:
	go build $(BUILD_TAGS) ./cmd/testgcpkms

### Benchmark specific targets and support. ###
.PHONY: benchmark
benchmark: perf
	go test $(BUILD_TAGS) -benchmem -bench=. ./benchmark

.PHONY: driver-benchmark
driver-benchmark: perf
	@go run cmd/godriver-benchmark/main.go | tee perf.suite

perf: driver-test-data.tar.gz
	tar -zxf $< $(if $(eq $(UNAME_S),Darwin),-s , --transform=s)/testdata/perf/
	@touch $@

driver-test-data.tar.gz:
	curl --retry 5 "https://s3.amazonaws.com/boxes.10gen.com/build/driver-test-data.tar.gz" -o driver-test-data.tar.gz --silent --max-time 120
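
# Illustrative usage, assuming GNU make and network access for the test-data download:
#   make perf              # fetch and extract the benchmark test data (driver-test-data.tar.gz)
#   make driver-benchmark  # run the benchmark harness and tee the results to perf.suite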

mongo/README.md (251 lines deleted)
@@ -1,251 +0,0 @@
<p align="center"><img src="etc/assets/mongo-gopher.png" width="250"></p>
<p align="center">
  <a href="https://goreportcard.com/report/go.mongodb.org/mongo-driver"><img src="https://goreportcard.com/badge/go.mongodb.org/mongo-driver"></a>
  <a href="https://pkg.go.dev/go.mongodb.org/mongo-driver/mongo"><img src="etc/assets/godev-mongo-blue.svg" alt="docs"></a>
  <a href="https://pkg.go.dev/go.mongodb.org/mongo-driver/bson"><img src="etc/assets/godev-bson-blue.svg" alt="docs"></a>
  <a href="https://www.mongodb.com/docs/drivers/go/current/"><img src="etc/assets/docs-mongodb-green.svg"></a>
</p>

# MongoDB Go Driver

The MongoDB supported driver for Go.

-------------------------
- [Requirements](#requirements)
- [Installation](#installation)
- [Usage](#usage)
- [Feedback](#feedback)
- [Testing / Development](#testing--development)
- [Continuous Integration](#continuous-integration)
- [License](#license)

-------------------------
## Requirements

- Go 1.13 or higher. We aim to support the latest versions of Go.
- Go 1.20 or higher is required to run the driver test suite.
- MongoDB 3.6 and higher.

-------------------------
## Installation

The recommended way to get started using the MongoDB Go driver is by using Go modules to install the dependency in
your project. This can be done either by importing packages from `go.mongodb.org/mongo-driver` and having the build
step install the dependency or by explicitly running

```bash
go get go.mongodb.org/mongo-driver/mongo
```

When using a version of Go that does not support modules, the driver can be installed using `dep` by running

```bash
dep ensure -add "go.mongodb.org/mongo-driver/mongo"
```

-------------------------
## Usage

To get started with the driver, import the `mongo` package and create a `mongo.Client` with the `Connect` function:

```go
import (
	"context"
	"time"

	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
	"go.mongodb.org/mongo-driver/mongo/readpref"
)

ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
client, err := mongo.Connect(ctx, options.Client().ApplyURI("mongodb://localhost:27017"))
```

Make sure to defer a call to `Disconnect` after instantiating your client:

```go
defer func() {
	if err = client.Disconnect(ctx); err != nil {
		panic(err)
	}
}()
```

For more advanced configuration and authentication, see the [documentation for mongo.Connect](https://pkg.go.dev/go.mongodb.org/mongo-driver/mongo#Connect).

Calling `Connect` does not block for server discovery. If you wish to know whether a MongoDB server has been found and connected to,
use the `Ping` method:

```go
ctx, cancel = context.WithTimeout(context.Background(), 2*time.Second)
defer cancel()
err = client.Ping(ctx, readpref.Primary())
```

To insert a document into a collection, first retrieve a `Database` and then a `Collection` instance from the `Client`:

```go
collection := client.Database("testing").Collection("numbers")
```

The `Collection` instance can then be used to insert documents:

```go
ctx, cancel = context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
res, err := collection.InsertOne(ctx, bson.D{{"name", "pi"}, {"value", 3.14159}})
id := res.InsertedID
```

To use `bson.D`, you will need to add `"go.mongodb.org/mongo-driver/bson"` to your imports.

Your import statement should now look like this:

```go
import (
	"context"
	"log"
	"time"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
	"go.mongodb.org/mongo-driver/mongo/readpref"
)
```

Several query methods return a cursor, which can be used like this:

```go
ctx, cancel = context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
cur, err := collection.Find(ctx, bson.D{})
if err != nil { log.Fatal(err) }
defer cur.Close(ctx)
for cur.Next(ctx) {
	var result bson.D
	err := cur.Decode(&result)
	if err != nil { log.Fatal(err) }
	// do something with result....
}
if err := cur.Err(); err != nil {
	log.Fatal(err)
}
```

For methods that return a single item, a `SingleResult` instance is returned:

```go
var result struct {
	Value float64
}
filter := bson.D{{"name", "pi"}}
ctx, cancel = context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
err = collection.FindOne(ctx, filter).Decode(&result)
if err == mongo.ErrNoDocuments {
	// Do something when no record was found
	fmt.Println("record does not exist")
} else if err != nil {
	log.Fatal(err)
}
// Do something with result...
```

Additional examples and documentation can be found under the examples directory and [on the MongoDB Documentation website](https://www.mongodb.com/docs/drivers/go/current/).

-------------------------
## Feedback

For help with the driver, please post in the [MongoDB Community Forums](https://developer.mongodb.com/community/forums/tag/golang/).

New features and bugs can be reported on JIRA: https://jira.mongodb.org/browse/GODRIVER

-------------------------
## Testing / Development

The driver tests can be run against several database configurations. The simplest configuration is a standalone mongod with no auth, no SSL, and no compression. To run these basic driver tests, make sure a standalone MongoDB server instance is running at localhost:27017 and then run `make` (on Windows, run `nmake`). This will run coverage, go-lint, and go-vet, and build the examples.
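
For example, a minimal local setup for the basic test run might look like the following sketch (the data directory path is illustrative):

```
# Start a throwaway standalone server for the test run.
mkdir -p /tmp/mongo-go-driver-test
mongod --dbpath /tmp/mongo-go-driver-test --port 27017 &

# Run the default target from the repository root.
make
```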

### Testing Different Topologies

To test a **replica set** or **sharded cluster**, set `MONGODB_URI="<connection-string>"` for the `make` command.
For example, for a local replica set named `rs1` comprised of three nodes on ports 27017, 27018, and 27019:

```
MONGODB_URI="mongodb://localhost:27017,localhost:27018,localhost:27019/?replicaSet=rs1" make
```

### Testing Auth and TLS

To test authentication and TLS, first set up a MongoDB cluster with auth and TLS configured. Testing authentication requires a user with the `root` role on the `admin` database. Here is an example command that would run a mongod with TLS correctly configured for tests. Either set or replace PATH_TO_SERVER_KEY_FILE and PATH_TO_CA_FILE with paths to their respective files:

```
mongod \
--auth \
--tlsMode requireTLS \
--tlsCertificateKeyFile $PATH_TO_SERVER_KEY_FILE \
--tlsCAFile $PATH_TO_CA_FILE \
--tlsAllowInvalidCertificates
```

To run the tests with `make`, set:
- `MONGO_GO_DRIVER_CA_FILE` to the location of the CA file used by the database
- `MONGO_GO_DRIVER_KEY_FILE` to the location of the client key file
- `MONGO_GO_DRIVER_PKCS8_ENCRYPTED_KEY_FILE` to the location of the pkcs8 client key file encrypted with the password string: `password`
- `MONGO_GO_DRIVER_PKCS8_UNENCRYPTED_KEY_FILE` to the location of the unencrypted pkcs8 key file
- `MONGODB_URI` to the connection string of the server
- `AUTH=auth`
- `SSL=ssl`

For example:

```
AUTH=auth SSL=ssl \
MONGO_GO_DRIVER_CA_FILE=$PATH_TO_CA_FILE \
MONGO_GO_DRIVER_KEY_FILE=$PATH_TO_CLIENT_KEY_FILE \
MONGO_GO_DRIVER_PKCS8_ENCRYPTED_KEY_FILE=$PATH_TO_ENCRYPTED_KEY_FILE \
MONGO_GO_DRIVER_PKCS8_UNENCRYPTED_KEY_FILE=$PATH_TO_UNENCRYPTED_KEY_FILE \
MONGODB_URI="mongodb://user:password@localhost:27017/?authSource=admin" \
make
```

Notes:
- The `--tlsAllowInvalidCertificates` flag is required on the server for the test suite to work correctly.
- The test suite requires the auth database to be set with `?authSource=admin`, not `/admin`.

### Testing Compression

The MongoDB Go Driver supports wire protocol compression using Snappy, zLib, or zstd. To run tests with wire protocol compression, set `MONGO_GO_DRIVER_COMPRESSOR` to `snappy`, `zlib`, or `zstd`. For example:

```
MONGO_GO_DRIVER_COMPRESSOR=snappy make
```

Ensure the [`--networkMessageCompressors` flag](https://www.mongodb.com/docs/manual/reference/program/mongod/#cmdoption-mongod-networkmessagecompressors) on mongod or mongos includes `zlib` if testing zLib compression.
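
For example, a test server could be started with all three compressors enabled (a sketch; combine this with whatever other options your deployment needs):

```
mongod --networkMessageCompressors "snappy,zlib,zstd"
```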

-------------------------
## Contribution

Check out the [project page](https://jira.mongodb.org/browse/GODRIVER) for tickets that need completing. See our [contribution guidelines](docs/CONTRIBUTING.md) for details.

-------------------------
## Continuous Integration

Commits to master are run automatically on [evergreen](https://evergreen.mongodb.com/waterfall/mongo-go-driver).

-------------------------
## Frequently Encountered Issues

See our [common issues](docs/common-issues.md) documentation for troubleshooting frequently encountered issues.

-------------------------
## Thanks and Acknowledgement

<a href="https://github.com/ashleymcnamara">@ashleymcnamara</a> - Mongo Gopher Artwork

-------------------------
## License

The MongoDB Go Driver is licensed under the [Apache License](LICENSE).

File diff suppressed because it is too large.

@@ -1,307 +0,0 @@
// Copyright (C) MongoDB, Inc. 2017-present.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

package bson

import (
	"compress/gzip"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"os"
	"path"
	"testing"
)

type encodetest struct {
	Field1String  string
	Field1Int64   int64
	Field1Float64 float64
	Field2String  string
	Field2Int64   int64
	Field2Float64 float64
	Field3String  string
	Field3Int64   int64
	Field3Float64 float64
	Field4String  string
	Field4Int64   int64
	Field4Float64 float64
}

type nestedtest1 struct {
	Nested nestedtest2
}

type nestedtest2 struct {
	Nested nestedtest3
}

type nestedtest3 struct {
	Nested nestedtest4
}

type nestedtest4 struct {
	Nested nestedtest5
}

type nestedtest5 struct {
	Nested nestedtest6
}

type nestedtest6 struct {
	Nested nestedtest7
}

type nestedtest7 struct {
	Nested nestedtest8
}

type nestedtest8 struct {
	Nested nestedtest9
}

type nestedtest9 struct {
	Nested nestedtest10
}

type nestedtest10 struct {
	Nested nestedtest11
}

type nestedtest11 struct {
	Nested encodetest
}

var encodetestInstance = encodetest{
	Field1String:  "foo",
	Field1Int64:   1,
	Field1Float64: 3.0,
	Field2String:  "bar",
	Field2Int64:   2,
	Field2Float64: 3.1,
	Field3String:  "baz",
	Field3Int64:   3,
	Field3Float64: 3.14,
	Field4String:  "qux",
	Field4Int64:   4,
	Field4Float64: 3.141,
}

var nestedInstance = nestedtest1{
	nestedtest2{
		nestedtest3{
			nestedtest4{
				nestedtest5{
					nestedtest6{
						nestedtest7{
							nestedtest8{
								nestedtest9{
									nestedtest10{
										nestedtest11{
											encodetest{
												Field1String:  "foo",
												Field1Int64:   1,
												Field1Float64: 3.0,
												Field2String:  "bar",
												Field2Int64:   2,
												Field2Float64: 3.1,
												Field3String:  "baz",
												Field3Int64:   3,
												Field3Float64: 3.14,
												Field4String:  "qux",
												Field4Int64:   4,
												Field4Float64: 3.141,
											},
										},
									},
								},
							},
						},
					},
				},
			},
		},
	},
}

const extendedBSONDir = "../testdata/extended_bson"

// readExtJSONFile reads the GZIP-compressed extended JSON document from the given filename in the
// "extended BSON" test data directory (../testdata/extended_bson) and returns it as a
// map[string]interface{}. It panics on any errors.
func readExtJSONFile(filename string) map[string]interface{} {
	filePath := path.Join(extendedBSONDir, filename)
	file, err := os.Open(filePath)
	if err != nil {
		panic(fmt.Sprintf("error opening file %q: %s", filePath, err))
	}
	defer func() {
		_ = file.Close()
	}()

	gz, err := gzip.NewReader(file)
	if err != nil {
		panic(fmt.Sprintf("error creating GZIP reader: %s", err))
	}
	defer func() {
		_ = gz.Close()
	}()

	data, err := ioutil.ReadAll(gz)
	if err != nil {
		panic(fmt.Sprintf("error reading GZIP contents of file: %s", err))
	}

	var v map[string]interface{}
	err = UnmarshalExtJSON(data, false, &v)
	if err != nil {
		panic(fmt.Sprintf("error unmarshalling extended JSON: %s", err))
	}

	return v
}

func BenchmarkMarshal(b *testing.B) {
	cases := []struct {
		desc  string
		value interface{}
	}{
		{
			desc:  "simple struct",
			value: encodetestInstance,
		},
		{
			desc:  "nested struct",
			value: nestedInstance,
		},
		{
			desc:  "deep_bson.json.gz",
			value: readExtJSONFile("deep_bson.json.gz"),
		},
		{
			desc:  "flat_bson.json.gz",
			value: readExtJSONFile("flat_bson.json.gz"),
		},
		{
			desc:  "full_bson.json.gz",
			value: readExtJSONFile("full_bson.json.gz"),
		},
	}

	for _, tc := range cases {
		b.Run(tc.desc, func(b *testing.B) {
			b.Run("BSON", func(b *testing.B) {
				for i := 0; i < b.N; i++ {
					_, err := Marshal(tc.value)
					if err != nil {
						b.Errorf("error marshalling BSON: %s", err)
					}
				}
			})

			b.Run("extJSON", func(b *testing.B) {
				for i := 0; i < b.N; i++ {
					_, err := MarshalExtJSON(tc.value, true, false)
					if err != nil {
						b.Errorf("error marshalling extended JSON: %s", err)
					}
				}
			})

			b.Run("JSON", func(b *testing.B) {
				for i := 0; i < b.N; i++ {
					_, err := json.Marshal(tc.value)
					if err != nil {
						b.Errorf("error marshalling JSON: %s", err)
					}
				}
			})
		})
	}
}

func BenchmarkUnmarshal(b *testing.B) {
	cases := []struct {
		desc  string
		value interface{}
	}{
		{
			desc:  "simple struct",
			value: encodetestInstance,
		},
		{
			desc:  "nested struct",
			value: nestedInstance,
		},
		{
			desc:  "deep_bson.json.gz",
			value: readExtJSONFile("deep_bson.json.gz"),
		},
		{
			desc:  "flat_bson.json.gz",
			value: readExtJSONFile("flat_bson.json.gz"),
		},
		{
			desc:  "full_bson.json.gz",
			value: readExtJSONFile("full_bson.json.gz"),
		},
	}

	for _, tc := range cases {
		b.Run(tc.desc, func(b *testing.B) {
			b.Run("BSON", func(b *testing.B) {
				data, err := Marshal(tc.value)
				if err != nil {
					b.Errorf("error marshalling BSON: %s", err)
					return
				}

				b.ResetTimer()
				var v2 map[string]interface{}
				for i := 0; i < b.N; i++ {
					err := Unmarshal(data, &v2)
					if err != nil {
						b.Errorf("error unmarshalling BSON: %s", err)
					}
				}
			})

			b.Run("extJSON", func(b *testing.B) {
				data, err := MarshalExtJSON(tc.value, true, false)
				if err != nil {
					b.Errorf("error marshalling extended JSON: %s", err)
					return
				}

				b.ResetTimer()
				var v2 map[string]interface{}
				for i := 0; i < b.N; i++ {
					err := UnmarshalExtJSON(data, true, &v2)
					if err != nil {
						b.Errorf("error unmarshalling extended JSON: %s", err)
					}
				}
			})

			b.Run("JSON", func(b *testing.B) {
				data, err := json.Marshal(tc.value)
				if err != nil {
					b.Errorf("error marshalling JSON: %s", err)
					return
				}

				b.ResetTimer()
				var v2 map[string]interface{}
				for i := 0; i < b.N; i++ {
					err := json.Unmarshal(data, &v2)
					if err != nil {
						b.Errorf("error unmarshalling JSON: %s", err)
					}
				}
			})
		})
	}
}

@@ -1,50 +0,0 @@
// Copyright (C) MongoDB, Inc. 2017-present.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
//
// Based on gopkg.in/mgo.v2/bson by Gustavo Niemeyer
// See THIRD-PARTY-NOTICES for original license terms.

package bson // import "go.mongodb.org/mongo-driver/bson"

import (
	"go.mongodb.org/mongo-driver/bson/primitive"
)

// Zeroer allows custom struct types to implement a report of zero
// state. All struct types that don't implement Zeroer or where IsZero
// returns false are considered to be not zero.
type Zeroer interface {
	IsZero() bool
}
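
// Illustrative sketch (hypothetical OptionalString type, an assumption for this
// example): a custom type can satisfy Zeroer so that its values are treated as
// zero, for instance when an "omitempty" struct tag decides whether to drop a field.
//
//	type OptionalString struct {
//		Set   bool
//		Value string
//	}
//
//	func (o OptionalString) IsZero() bool { return !o.Set }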

// D is an ordered representation of a BSON document. This type should be used when the order of the elements matters,
// such as MongoDB command documents. If the order of the elements does not matter, an M should be used instead.
//
// A D should not be constructed with duplicate key names, as that can cause undefined server behavior.
//
// Example usage:
//
//	bson.D{{"foo", "bar"}, {"hello", "world"}, {"pi", 3.14159}}
type D = primitive.D

// E represents a BSON element for a D. It is usually used inside a D.
type E = primitive.E

// M is an unordered representation of a BSON document. This type should be used when the order of the elements does not
// matter. This type is handled as a regular map[string]interface{} when encoding and decoding. Elements will be
// serialized in an undefined, random order. If the order of the elements matters, a D should be used instead.
//
// Example usage:
//
//	bson.M{"foo": "bar", "hello": "world", "pi": 3.14159}
type M = primitive.M

// An A is an ordered representation of a BSON array.
//
// Example usage:
//
//	bson.A{"bar", "world", 3.14159, bson.D{{"qux", 12345}}}
type A = primitive.A
Some files were not shown because too many files have changed in this diff.