Compare commits

...

131 Commits

Author SHA1 Message Date
398ed56d32 v0.0.305
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m21s
2023-11-09 09:35:56 +01:00
f3ecba3883 v0.0.304 add support for WithModifyingPipeline to wmo
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m22s
2023-11-09 09:26:46 +01:00
45031b05cf v0.0.303
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m35s
2023-11-08 19:01:15 +01:00
7413ea045d v0.0.302
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m41s
2023-11-08 18:53:02 +01:00
62c9a4e734 v0.0.301 pagination (page+limit) support in wmo
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 2m11s
2023-11-08 18:30:30 +01:00
3a8baaa6d9 v0.0.300 add custom unmarshal-hooks to wmo
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m45s
2023-11-04 18:55:44 +01:00
498785e213 v0.0.299 pctx.RawBody( *[]byte )
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m41s
2023-11-03 16:53:41 +01:00
678f95642c v0.0.298 use go/format instead of manual command invocation
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m6s
2023-11-01 04:20:08 +01:00
dacc97e2ce v0.0.297
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m1s
2023-11-01 00:31:51 +01:00
f8c0c0afa0 v0.0.296 add csid.generateIDFromSeed
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m4s
2023-11-01 00:29:58 +01:00
2fbd5cf965 v0.0.295 added generic base-conversion algorithm
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m4s
2023-11-01 00:23:17 +01:00
75f71fe3db v0.0.294 migrate bfcodegen to templates
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 58s
2023-10-31 22:58:28 +01:00
ab1a1ab6f6 v0.0.293 fix NPE
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m52s
2023-10-30 13:37:31 +01:00
19ee5019ef v0.0.292
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m11s
2023-10-30 10:14:38 +01:00
42b68507f2 v0.0.291
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 56s
2023-10-26 13:02:45 +02:00
9d0047a11e v0.0.290 csid
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m38s
2023-10-26 13:01:58 +02:00
06d81f1682 v0.0.289 fsext
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m31s
2023-10-26 11:29:08 +02:00
7b8ab03779 v0.0.288 default to recursive-error-msg in exerr.Error()
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m54s
2023-10-19 14:16:01 +02:00
07cbcf5a0a v0.0.287 fix bug in confext::ApplyEnvOverrides if a struct env key exists in the os.env
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m37s
2023-10-12 10:02:42 +02:00
da41ec3e84 run CICD tests without doker workaround
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m37s
2023-10-11 15:50:09 +02:00
592fae25af v0.0.286 allow spaces in enum-keys
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m14s
2023-10-11 11:27:18 +02:00
7968460fa2 v0.0.285
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m0s
2023-10-09 15:25:30 +02:00
b808c5727c v0.0.284
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m14s
2023-10-09 15:22:57 +02:00
796f7956b8 v0.0.283
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m5s
2023-10-09 15:17:22 +02:00
1e6b92d1d9 v0.0.282 ginext bugfix
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 48s
2023-10-09 09:23:40 +02:00
0b85fa5af9 v0.0.281 typo fix
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 50s
2023-10-09 09:04:07 +02:00
c3318cc1de v0.0.280 DYN-166 ginext jsonfilter middleware
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 51s
2023-10-09 09:02:37 +02:00
fbf4d7b915 v0.0.279 DYN-166 ginext jsonfilter middleware
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m12s
2023-10-09 08:55:22 +02:00
9cc0abf9e0 v0.0.278 DYN-166 bugfix jsonfilter
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 52s
2023-10-05 12:54:07 +02:00
7c40bcfd3c v0.0.277 DYN-166 json marshal filter in ginext Write
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 54s
2023-10-05 12:00:51 +02:00
05636a1e4d v0.0.276
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m12s
2023-10-05 10:59:20 +02:00
0f52b860ea DYN-166 add jsonfilter to json library
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 56s
2023-10-05 10:57:34 +02:00
b5cd116219 DYN-166 add jsonfilter to json library
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 47s
2023-10-05 10:45:09 +02:00
98486842ae v0.0.275 fix missing returns in (v MetaValue) ShortString
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 54s
2023-09-29 16:00:40 +02:00
7577a2dd47 v0.0.274 limit exerr log meta values (shortlog) to 240 chars
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 50s
2023-09-27 16:18:21 +02:00
08681756b6 v0.0.273 add stack to PanicWrappedErr
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m9s
2023-09-27 14:15:59 +02:00
64772d0474 v0.0.272 WMO: fix FindOneAndReplace not using FindOneAndReplace
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 50s
2023-09-26 14:41:15 +02:00
127764556e Merge branch 'master' of ssh://gogs.mikescher.com:8022/BlackForestBytes/goext 2023-09-26 14:41:06 +02:00
170f43d806 WMO: fix FindOneAndReplace not using FindOneAndReplace 2023-09-26 14:40:56 +02:00
9dffc41274 v0.0.271 return old value in AtomicBool::Set
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 53s
2023-09-26 14:32:45 +02:00
c63cf442f8 try to fix test 'cmdext:TestFailOnStderr'
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 39s
2023-09-25 18:04:56 +02:00
a2ba283632 v0.0.270 fix inversion of AssertDeepEqual
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 54s
2023-09-25 11:35:03 +02:00
4a1fb1ae18 fix inversion of AssertDeepEqual 2023-09-25 11:34:51 +02:00
a127b24e62 v0.0.269 add AssertSetDeepEqual
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m8s
2023-09-25 09:18:22 +02:00
69d6290376 add AssertSetDeepEqual 2023-09-25 09:18:07 +02:00
c08a739158 v0.0.268 added WeekStart() and WeekEnd()
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 50s
2023-09-21 16:29:23 +02:00
5f5f0e44f0 v0.0.267 fix AssertDeepEqual
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 50s
2023-09-21 14:15:02 +02:00
6e6797eac5 fix AssertDeepEqual 2023-09-21 14:14:51 +02:00
cd9406900a v0.0.266 fix tst showing wrong file:line
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m22s
2023-09-21 13:08:13 +02:00
6c81f7f6bc fix tst showing wrong file:line, add DeepEqual 2023-09-21 13:07:55 +02:00
d56a0235af v0.0.265 add ListWithCount
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 49s
2023-09-18 12:57:27 +02:00
de2ca763c1 add function for ListWithCount 2023-09-18 12:56:56 +02:00
da52bb5c90 v0.0.264 added Valid() to id-gen
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 49s
2023-09-18 11:46:17 +02:00
3d4afe7b25 v0.0.263 re-add checksum guard to id-generate
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 50s
2023-09-18 10:43:29 +02:00
f5766d639c v0.0.262 ignore _gen files in bfcodegen checksum-calc
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 46s
2023-09-18 10:42:43 +02:00
cdf2a6e76b v0.0.261 added id-generate.go
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m7s
2023-09-18 10:38:25 +02:00
6d7cfb86f8 v0.0.260 wmo: fix endless recursion in wmo reflection
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 50s
2023-09-12 11:40:39 +02:00
1e9d663ffe fix endless recursion in wmo reflection 2023-09-12 11:39:51 +02:00
5b8d7ebf87 v0.0.259 wmo: allow fields to pointers to structs
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 49s
2023-09-12 10:48:57 +02:00
11dc6d2640 use type instead of value for Reflection in Coll.initFields 2023-09-12 10:47:41 +02:00
29a3f73f15 v0.0.258
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m22s
2023-09-11 11:28:34 +02:00
98105642fc removed default sort 2023-09-11 11:28:26 +02:00
0fd5f3b417 v0.0.257 better handling if pagination is faulty in wmo list
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m2s
2023-09-05 15:01:55 +02:00
43cac4b3bb v0.0.256 bind header
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m4s
2023-08-28 10:44:38 +02:00
cd68af8e66 v0.0.255 tuples
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 2m43s
2023-08-24 09:47:32 +02:00
113d838876 v0.0.254 revert back to 0.0.250
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m14s
2023-08-22 10:49:57 +02:00
9e5bc0d3ea v0.0.253
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m23s
2023-08-22 10:36:35 +02:00
6d3bd13f61 v0.0.252
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m5s
2023-08-22 10:23:04 +02:00
b5ca475b3f v0.0.251 exerr.WithStackSkip
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m9s
2023-08-22 10:21:13 +02:00
a75b1291cb v0.0.250
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 51s
2023-08-21 15:34:27 +02:00
21cd1ee066 v0.0.249 better MDTAny json serialization
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 54s
2023-08-21 15:19:40 +02:00
ae43cbb623 v0.0.248 exerr in wmo package
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 55s
2023-08-21 15:08:35 +02:00
9b752a911c v0.0.247 -.-
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m12s
2023-08-21 14:23:44 +02:00
ec9ac26a4c v0.0.246 timeext
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m11s
2023-08-21 14:15:06 +02:00
39a0b73d56 v0.0.245
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 40s
2023-08-21 13:27:36 +02:00
2e2e15d4d2 v0.0.244
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 48s
2023-08-18 13:27:02 +02:00
0d16946aba v0.0.243
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m18s
2023-08-18 13:25:18 +02:00
14441c2378 Adde gitea workflow: tests
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 57s
2023-08-14 18:39:22 +02:00
f6bcdc9903 Merge remote-tracking branch 'origin/master' 2023-08-14 16:33:03 +02:00
a95053211c Fix tests 2023-08-14 16:32:39 +02:00
813ce71e3e v0.0.242 forgot to return something 2023-08-14 16:05:12 +02:00
56ae0cfc6c v0.0.241 join string array 2023-08-14 15:54:50 +02:00
202afc9068 v0.0.240 2023-08-14 15:36:12 +02:00
56094b3cb6 v0.0.239 pctx.WithTImeout 2023-08-11 16:32:34 +02:00
0da098e9f9 v0.0.238 2023-08-09 19:51:41 +02:00
f0881c9fd6 v0.0.237 parse application/x-www-form-urlencoded in ginext 2023-08-09 19:35:01 +02:00
029b408749 v0.0.236 cmdext.FailOnStdErr 2023-08-09 17:48:06 +02:00
84b2be3169 v0.0.235 added .Enum(..) to exerr 2023-08-09 14:40:16 +02:00
c872cecc67 v0.0.234 2023-08-09 10:39:14 +02:00
99cd92729e v0.0.233 IncludeMetaInGinOutput 2023-08-09 10:37:59 +02:00
ac416f7b69 v0.0.232 2023-08-08 18:01:00 +02:00
e10140e143 v0.0.231 2023-08-08 16:10:31 +02:00
e165f0f62f v0.0.230 2023-08-08 16:09:02 +02:00
655d4daad9 v0.0.229 2023-08-08 16:05:44 +02:00
87a004e577 v0.0.228 bf 2023-08-08 15:33:52 +02:00
376c6cab50 v0.0.227 error on duplicate exerr.ErrorType 2023-08-08 15:28:29 +02:00
4a3f25baa0 v0.0.226 2023-08-08 14:28:09 +02:00
aa33bc8df3 v0.0.225 2023-08-08 13:09:15 +02:00
96b3718375 v0.0.224 implement error.As(x) for exerr 2023-08-08 12:38:22 +02:00
5f9b55933b v0.0.223 2023-08-08 11:52:40 +02:00
74d42637e7 v0.0.222 forgot status code 2023-08-06 19:11:59 +02:00
0c05bcf29b v0.0.221 download file data 2023-08-06 19:10:31 +02:00
9136143f2f v0.0.220 add ginext.bufferBody 2023-08-03 09:09:27 +02:00
2f1b784dc2 v0.0.219 implement error.Is(*) for exerr 2023-07-28 15:42:12 +02:00
190584e0e6 v0.0.218 bf 2023-07-27 17:16:30 +02:00
b7003b9ec9 v0.0.217 2023-07-27 17:12:41 +02:00
4f871271e8 v0.0.216 2023-07-27 17:00:53 +02:00
91f4793678 v0.0.215 Add (ee *ExErr) ToAPIJson 2023-07-27 14:37:11 +02:00
3b30bb049e v0.0.214 reassign innerctx 2023-07-27 09:58:10 +02:00
f0c5b36ea9 v0.0.213 inject gin key value pairs into context 2023-07-27 09:46:06 +02:00
647ec64c3b v0.0.212 2023-07-26 10:44:26 +02:00
b5f9b6b638 v0.0.211 2023-07-26 10:40:42 +02:00
c7949febf2 v0.0.210 fix ginext route dump 2023-07-25 11:16:11 +02:00
15a4b2a713 v0.0.209 removed g context from err func 2023-07-25 10:56:03 +02:00
493c6ebae8 v0.0.208 remove context from err functions because its not used 2023-07-25 10:51:14 +02:00
fb847b03af v0.0.207 renamed APIError to Error 2023-07-25 10:47:00 +02:00
f826633e6e v0.0.206 2023-07-24 18:50:14 +02:00
edeae23bf1 v0.0.205 2023-07-24 18:47:48 +02:00
a038b86147 v0.0.204 2023-07-24 18:42:33 +02:00
ede0b99d3a v0.0.203 2023-07-24 18:38:04 +02:00
d04ce18eb0 v0.0.202 2023-07-24 18:34:56 +02:00
8ae9a0f107 v0.0.201 2023-07-24 18:22:36 +02:00
a259bb6dbc v0.0.200 2023-07-24 17:42:18 +02:00
adf32568ee v0.0.199 2023-07-24 17:23:38 +02:00
0cfa159cb1 v0.0.198 2023-07-24 14:16:02 +02:00
0ead99608a v0.0.197 2023-07-24 12:27:06 +02:00
7fe3e66cad v0.0.196 2023-07-24 11:47:47 +02:00
a73d7d1654 v0.0.195 2023-07-24 11:42:52 +02:00
bbd7a7bc2c v0.0.194 2023-07-24 11:40:47 +02:00
f5151eb214 v0.0.193 2023-07-24 11:38:57 +02:00
eefb9ac9f5 v0.0.192 2023-07-24 11:30:07 +02:00
93 changed files with 3647 additions and 880 deletions


@@ -0,0 +1,36 @@
# https://docs.gitea.com/next/usage/actions/quickstart
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions
# https://docs.github.com/en/actions/learn-github-actions/contexts#github-context

name: Build Docker and Deploy
run-name: Build & Deploy ${{ gitea.ref }} on ${{ gitea.actor }}

on: [push]

jobs:
  run_tests:
    name: Run goext test-suite
    runs-on: bfb-cicd-latest
    steps:
      - name: Check out code
        uses: actions/checkout@v3

      - name: Setup go
        uses: actions/setup-go@v4
        with:
          go-version-file: '${{ gitea.workspace }}/go.mod'

      - name: Setup packages
        uses: awalsh128/cache-apt-pkgs-action@latest
        with:
          packages: curl python3
          version: 1.0

      - name: go version
        run: go version

      - name: Run tests
        run: cd "${{ gitea.workspace }}" && make test

6
.idea/golinter.xml generated Normal file

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="GoLinterSettings">
    <option name="checkGoLinterExe" value="false" />
  </component>
</project>


@@ -5,7 +5,13 @@ run:
 test:
 	# go test ./...
 	which gotestsum || go install gotest.tools/gotestsum@latest
-	gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./test"
+	gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./..."
+
+test-in-docker:
+	tag="goext_temp_test_image:$(shell uuidgen | tr -d '-')"; \
+	docker build --tag $$tag . -f .gitea/workflows/Dockerfile_tests; \
+	docker run --rm $$tag; \
+	docker rmi $$tag
 
 version:
 	_data/version.sh


@@ -10,32 +10,34 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"`
 ### Packages:
 
 | Name        | Maintainer | Description |
 |-------------|------------|-------------|
 | langext     | Mike       | General uttility/helper functions, (everything thats missing from go standard library) |
 | mathext     | Mike       | Utility/Helper functions for math |
 | cryptext    | Mike       | Utility/Helper functions for encryption |
 | syncext     | Mike       | Utility/Helper funtions for multi-threading / mutex / channels |
 | dataext     | Mike       | Various useful data structures |
 | zipext      | Mike       | Utility for zip/gzip/tar etc |
-| reflectext  | Mike       | Utility for golagn reflection |
+| reflectext  | Mike       | Utility for golang reflection |
+| fsext       | Mike       | Utility for filesytem access |
 |             |            | |
 | mongoext    | Mike       | Utility/Helper functions for mongodb |
 | cursortoken | Mike       | MongoDB cursortoken implementation |
+| pagination  | Mike       | Pagination implementation |
 |             |            | |
 | totpext     | Mike       | Implementation of TOTP (2-Factor-Auth) |
 | termext     | Mike       | Utilities for terminals (mostly color output) |
 | confext     | Mike       | Parses environment configuration into structs |
 | cmdext      | Mike       | Runner for external commands/processes |
 |             |            | |
 | sq          | Mike       | Utility functions for sql based databases |
 | tst         | Mike       | Utility functions for unit tests |
 |             |            | |
 | rfctime     | Mike       | Classes for time seriallization, with different marshallign method for mongo and json |
 | gojson      | Mike       | Same interface for marshalling/unmarshalling as go/json, except with proper serialization of null arrays/maps |
 |             |            | |
 | bfcodegen   | Mike       | Various codegen tools (run via go generate) |
 |             |            | |
 | rext        | Mike       | Regex Wrapper, wraps regexp with a better interface |
 | wmo         | Mike       | Mongo Wrapper, wraps mongodb with a better interface |
 |             |            | |

12
TODO.md

@@ -2,12 +2,6 @@
 - cronext
-- cursortoken
-- typed/geenric mongo wrapper
-- error package
 - rfctime.DateOnly
 - rfctime.HMSTimeOnly
 - rfctime.NanoTimeOnly

BIN
bfcodegen/_test_example.tgz Normal file

Binary file not shown.

182
bfcodegen/csid-generate.go Normal file

@@ -0,0 +1,182 @@
package bfcodegen
import (
"bytes"
_ "embed"
"errors"
"fmt"
"go/format"
"gogs.mikescher.com/BlackForestBytes/goext"
"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"io"
"os"
"path"
"path/filepath"
"regexp"
"strings"
"text/template"
)
type CSIDDef struct {
File string
FileRelative string
Name string
Prefix string
}
var rexCSIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`))
var rexCSIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@csid:type)\s+\[(?P<prefix>[A-Z0-9]{3})].*$`))
var rexCSIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumCharsetIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))
//go:embed csid-generate.template
var templateCSIDGenerateText string
func GenerateCharsetIDSpecs(sourceDir string, destFile string) error {
files, err := os.ReadDir(sourceDir)
if err != nil {
return err
}
oldChecksum := "N/A"
if _, err := os.Stat(destFile); !os.IsNotExist(err) {
content, err := os.ReadFile(destFile)
if err != nil {
return err
}
if m, ok := rexCSIDChecksumConst.MatchFirst(string(content)); ok {
oldChecksum = m.GroupByName("cs").Value()
}
}
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") })
langext.SortBy(files, func(v os.DirEntry) string { return v.Name() })
newChecksumStr := goext.GoextVersion
for _, f := range files {
content, err := os.ReadFile(path.Join(sourceDir, f.Name()))
if err != nil {
return err
}
newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content)
}
newChecksum := cryptext.BytesSha256([]byte(newChecksumStr))
if newChecksum != oldChecksum {
fmt.Printf("[IDGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum)
} else {
fmt.Printf("[IDGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum)
return nil
}
allIDs := make([]CSIDDef, 0)
pkgname := ""
for _, f := range files {
fmt.Printf("========= %s =========\n\n", f.Name())
fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name()))
if err != nil {
return err
}
fmt.Printf("\n")
allIDs = append(allIDs, fileIDs...)
if pn != "" {
pkgname = pn
}
}
if pkgname == "" {
return errors.New("no package name found in any file")
}
fdata, err := format.Source([]byte(fmtCSIDOutput(newChecksum, allIDs, pkgname)))
if err != nil {
return err
}
err = os.WriteFile(destFile, fdata, 0o755)
if err != nil {
return err
}
return nil
}
func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) {
file, err := os.Open(fn)
if err != nil {
return nil, "", err
}
defer func() { _ = file.Close() }()
bin, err := io.ReadAll(file)
if err != nil {
return nil, "", err
}
lines := strings.Split(string(bin), "\n")
ids := make([]CSIDDef, 0)
pkgname := ""
for i, line := range lines {
if i == 0 && strings.HasPrefix(line, "// Code generated by") {
break
}
if match, ok := rexCSIDPackage.MatchFirst(line); i == 0 && ok {
pkgname = match.GroupByName("name").Value()
continue
}
if match, ok := rexCSIDDef.MatchFirst(line); ok {
rfp, err := filepath.Rel(basedir, fn)
if err != nil {
return nil, "", err
}
def := CSIDDef{
File: fn,
FileRelative: rfp,
Name: match.GroupByName("name").Value(),
Prefix: match.GroupByName("prefix").Value(),
}
fmt.Printf("Found ID definition { '%s' }\n", def.Name)
ids = append(ids, def)
}
}
return ids, pkgname, nil
}
func fmtCSIDOutput(cs string, ids []CSIDDef, pkgname string) string {
templ := template.Must(template.New("csid-generate").Parse(templateCSIDGenerateText))
buffer := bytes.Buffer{}
err := templ.Execute(&buffer, langext.H{
"PkgName": pkgname,
"Checksum": cs,
"GoextVersion": goext.GoextVersion,
"IDs": ids,
})
if err != nil {
panic(err)
}
return buffer.String()
}
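
For orientation (not part of the diff itself): GenerateCharsetIDSpecs scans a package directory for string types carrying an `@csid:type [XXX]` marker comment, as matched by rexCSIDDef above, and writes the generated code to destFile. A minimal, hypothetical input file could look like this; the type names and the USR/SES prefixes are made up for illustration:

package models

type UserID string    // @csid:type [USR]
type SessionID string // @csid:type [SES]

Calling GenerateCharsetIDSpecs("./models", "./models/csid_gen.go"), e.g. from a small go:generate helper, would then render csid-generate.template into models/csid_gen.go.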


@@ -0,0 +1,190 @@
// Code generated by csid-generate.go DO NOT EDIT.
package {{.PkgName}}
import "crypto/rand"
import "crypto/sha256"
import "fmt"
import "github.com/go-playground/validator/v10"
import "github.com/rs/zerolog/log"
import "gogs.mikescher.com/BlackForestBytes/goext/exerr"
import "gogs.mikescher.com/BlackForestBytes/goext/langext"
import "gogs.mikescher.com/BlackForestBytes/goext/rext"
import "math/big"
import "reflect"
import "regexp"
import "strings"
const ChecksumCharsetIDGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}}
const idlen = 24
const checklen = 1
const idCharset = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
const idCharsetLen = len(idCharset)
var charSetReverseMap = generateCharsetMap()
const ({{range .IDs}}
prefix{{.Name}} = "{{.Prefix}}" {{end}}
)
var ({{range .IDs}}
regex{{.Name}} = generateRegex(prefix{{.Name}}) {{end}}
)
func generateRegex(prefix string) rext.Regex {
return rext.W(regexp.MustCompile(fmt.Sprintf("^%s[%s]{%d}[%s]{%d}$", prefix, idCharset, idlen-len(prefix)-checklen, idCharset, checklen)))
}
func generateCharsetMap() []int {
result := make([]int, 128)
for i := 0; i < len(result); i++ {
result[i] = -1
}
for idx, chr := range idCharset {
result[int(chr)] = idx
}
return result
}
func generateID(prefix string) string {
k := ""
csMax := big.NewInt(int64(idCharsetLen))
checksum := 0
for i := 0; i < idlen-len(prefix)-checklen; i++ {
v, err := rand.Int(rand.Reader, csMax)
if err != nil {
panic(err)
}
v64 := v.Int64()
k += string(idCharset[v64])
checksum = (checksum + int(v64)) % (idCharsetLen)
}
checkstr := string(idCharset[checksum%idCharsetLen])
return prefix + k + checkstr
}
func generateIDFromSeed(prefix string, seed string) string {
h := sha256.New()
iddata := ""
for len(iddata) < idlen-len(prefix)-checklen {
h.Write([]byte(seed))
bs := h.Sum(nil)
iddata += langext.NewAnyBaseConverter(idCharset).Encode(bs)
}
checksum := 0
for i := 0; i < idlen-len(prefix)-checklen; i++ {
ichr := int(iddata[i])
checksum = (checksum + charSetReverseMap[ichr]) % (idCharsetLen)
}
checkstr := string(idCharset[checksum%idCharsetLen])
return prefix + iddata[:(idlen-len(prefix)-checklen)] + checkstr
}
func validateID(prefix string, value string) error {
if len(value) != idlen {
return exerr.New(exerr.TypeInvalidCSID, "id has the wrong length").Str("value", value).Build()
}
if !strings.HasPrefix(value, prefix) {
return exerr.New(exerr.TypeInvalidCSID, "id is missing the correct prefix").Str("value", value).Str("prefix", prefix).Build()
}
checksum := 0
for i := len(prefix); i < len(value)-checklen; i++ {
ichr := int(value[i])
if ichr < 0 || ichr >= len(charSetReverseMap) || charSetReverseMap[ichr] == -1 {
return exerr.New(exerr.TypeInvalidCSID, "id contains invalid characters").Str("value", value).Build()
}
checksum = (checksum + charSetReverseMap[ichr]) % (idCharsetLen)
}
checkstr := string(idCharset[checksum%idCharsetLen])
if !strings.HasSuffix(value, checkstr) {
return exerr.New(exerr.TypeInvalidCSID, "id checkstring is invalid").Str("value", value).Str("checkstr", checkstr).Build()
}
return nil
}
func getRawData(prefix string, value string) string {
if len(value) != idlen {
return ""
}
return value[len(prefix) : idlen-checklen]
}
func getCheckString(prefix string, value string) string {
if len(value) != idlen {
return ""
}
return value[idlen-checklen:]
}
func ValidateEntityID(vfl validator.FieldLevel) bool {
if !vfl.Field().CanInterface() {
log.Error().Msgf("Failed to validate EntityID (cannot interface ?!?)")
return false
}
ifvalue := vfl.Field().Interface()
if value1, ok := ifvalue.(EntityID); ok {
if vfl.Field().Type().Kind() == reflect.Pointer && langext.IsNil(value1) {
return true
}
if err := value1.Valid(); err != nil {
log.Debug().Msgf("Failed to validate EntityID '%s' (%s)", value1.String(), err.Error())
return false
} else {
return true
}
} else {
log.Error().Msgf("Failed to validate EntityID (wrong type: %T)", ifvalue)
return false
}
}
{{range .IDs}}
// ================================ {{.Name}} ({{.FileRelative}}) ================================
func New{{.Name}}() {{.Name}} {
return {{.Name}}(generateID(prefix{{.Name}}))
}
func (id {{.Name}}) Valid() error {
return validateID(prefix{{.Name}}, string(id))
}
func (i {{.Name}}) String() string {
return string(i)
}
func (i {{.Name}}) Prefix() string {
return prefix{{.Name}}
}
func (id {{.Name}}) Raw() string {
return getRawData(prefix{{.Name}}, string(id))
}
func (id {{.Name}}) CheckString() string {
return getCheckString(prefix{{.Name}}, string(id))
}
func (id {{.Name}}) Regex() rext.Regex {
return regex{{.Name}}
}
{{end}}
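
As a reading aid, inferred from the constants above rather than stated in the diff: with idlen = 24, checklen = 1 and a three-character prefix, every ID is the prefix, 20 charset characters and one trailing checksum character. For a hypothetical UserID with prefix "USR", the generated in-package API behaves roughly like this:

id := NewUserID()                             // "USR" + 20 random charset chars + 1 check char (24 chars total)
err := id.Valid()                             // nil only if length, prefix and checksum character all match
payload := id.Raw()                           // the 20-character payload, without prefix and check char
det := generateIDFromSeed("USR", "some-seed") // deterministic: the same seed always yields the same ID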


@@ -0,0 +1,52 @@
package bfcodegen
import (
_ "embed"
"fmt"
"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"os"
"path/filepath"
"testing"
"time"
)
//go:embed _test_example.tgz
var CSIDExampleModels []byte
func TestGenerateCSIDSpecs(t *testing.T) {
tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz")
tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())
err := os.WriteFile(tmpFile, CSIDExampleModels, 0o777)
tst.AssertNoErr(t, err)
t.Cleanup(func() { _ = os.Remove(tmpFile) })
err = os.Mkdir(tmpDir, 0o777)
tst.AssertNoErr(t, err)
t.Cleanup(func() { _ = os.RemoveAll(tmpFile) })
_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
tst.AssertNoErr(t, err)
err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go")
tst.AssertNoErr(t, err)
err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go")
tst.AssertNoErr(t, err)
fmt.Println()
fmt.Println()
fmt.Println()
fmt.Println("=====================================================================================================")
fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/csid_gen.go"))(t)))
fmt.Println("=====================================================================================================")
fmt.Println()
fmt.Println()
fmt.Println()
}


@@ -1,10 +1,12 @@
 package bfcodegen
 
 import (
+	"bytes"
+	_ "embed"
 	"errors"
 	"fmt"
+	"go/format"
 	"gogs.mikescher.com/BlackForestBytes/goext"
-	"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
 	"gogs.mikescher.com/BlackForestBytes/goext/langext"
 	"gogs.mikescher.com/BlackForestBytes/goext/rext"
@@ -14,7 +16,7 @@ import (
 	"path/filepath"
 	"regexp"
 	"strings"
-	"time"
+	"text/template"
 )
 
 type EnumDefVal struct {
@@ -31,13 +33,16 @@ type EnumDef struct {
 	Values       []EnumDefVal
 }
 
-var rexPackage = rext.W(regexp.MustCompile("^package\\s+(?P<name>[A-Za-z0-9_]+)\\s*$"))
-var rexEnumDef = rext.W(regexp.MustCompile("^\\s*type\\s+(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*//\\s*(@enum:type).*$"))
-var rexValueDef = rext.W(regexp.MustCompile("^\\s*(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*=\\s*(?P<value>(\"[A-Za-z0-9_:]+\"|[0-9]+))\\s*(//(?P<descr>.*))?.*$"))
-var rexChecksumConst = rext.W(regexp.MustCompile("const ChecksumGenerator = \"(?P<cs>[A-Za-z0-9_]*)\""))
+var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`))
+var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`))
+var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s]+"|[0-9]+))\s*(//(?P<descr>.*))?.*$`))
+var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))
+
+//go:embed enum-generate.template
+var templateEnumGenerateText string
 
 func GenerateEnumSpecs(sourceDir string, destFile string) error {
@@ -52,13 +57,14 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
 		if err != nil {
 			return err
 		}
-		if m, ok := rexChecksumConst.MatchFirst(string(content)); ok {
+		if m, ok := rexEnumChecksumConst.MatchFirst(string(content)); ok {
 			oldChecksum = m.GroupByName("cs").Value()
 		}
 	}
 
 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
+	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") })
 	langext.SortBy(files, func(v os.DirEntry) string { return v.Name() })
 
 	newChecksumStr := goext.GoextVersion
@@ -85,7 +91,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
 	for _, f := range files {
 		fmt.Printf("========= %s =========\n\n", f.Name())
 
-		fileEnums, pn, err := processFile(sourceDir, path.Join(sourceDir, f.Name()))
+		fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name()))
 		if err != nil {
 			return err
 		}
@@ -103,29 +109,20 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
 		return errors.New("no package name found in any file")
 	}
 
-	err = os.WriteFile(destFile, []byte(fmtOutput(newChecksum, allEnums, pkgname)), 0o755)
+	fdata, err := format.Source([]byte(fmtEnumOutput(newChecksum, allEnums, pkgname)))
 	if err != nil {
 		return err
 	}
 
-	res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second))
+	err = os.WriteFile(destFile, fdata, 0o755)
 	if err != nil {
 		return err
 	}
 
-	if res.CommandTimedOut {
-		fmt.Println(res.StdCombined)
-		return errors.New("go fmt timed out")
-	}
-	if res.ExitCode != 0 {
-		fmt.Println(res.StdCombined)
-		return errors.New("go fmt did not succeed")
-	}
-
 	return nil
 }
 
-func processFile(basedir string, fn string) ([]EnumDef, string, error) {
+func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) {
 	file, err := os.Open(fn)
 	if err != nil {
 		return nil, "", err
@@ -149,7 +146,7 @@ func processFile(basedir string, fn string) ([]EnumDef, string, error) {
 			break
 		}
 
-		if match, ok := rexPackage.MatchFirst(line); i == 0 && ok {
+		if match, ok := rexEnumPackage.MatchFirst(line); i == 0 && ok {
 			pkgname = match.GroupByName("name").Value()
 			continue
 		}
@@ -172,7 +169,7 @@ func processFile(basedir string, fn string) ([]EnumDef, string, error) {
 			fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type)
 		}
 
-		if match, ok := rexValueDef.MatchFirst(line); ok {
+		if match, ok := rexEnumValueDef.MatchFirst(line); ok {
 			typename := match.GroupByName("type").Value()
 			def := EnumDefVal{
 				VarName:     match.GroupByName("name").Value(),
@@ -202,160 +199,33 @@ func processFile(basedir string, fn string) ([]EnumDef, string, error) {
return enums, pkgname, nil return enums, pkgname, nil
} }
func fmtOutput(cs string, enums []EnumDef, pkgname string) string { func fmtEnumOutput(cs string, enums []EnumDef, pkgname string) string {
str := "// Code generated by enum-generate.go DO NOT EDIT.\n"
str += "\n"
str += "package " + pkgname + "\n"
str += "\n"
str += "import \"gogs.mikescher.com/BlackForestBytes/goext/langext\"" + "\n" templ := template.New("enum-generate")
str += "\n"
str += "const ChecksumGenerator = \"" + cs + "\"" + "\n" templ = templ.Funcs(template.FuncMap{
str += "\n" "boolToStr": func(b bool) string { return langext.Conditional(b, "true", "false") },
"deref": func(v *string) string { return *v },
"trimSpace": func(str string) string { return strings.TrimSpace(str) },
"hasStr": func(v EnumDef) bool { return v.Type == "string" },
"hasDescr": func(v EnumDef) bool {
return langext.ArrAll(v.Values, func(val EnumDefVal) bool { return val.Description != nil })
},
})
str += "type Enum interface {" + "\n" templ = template.Must(templ.Parse(templateEnumGenerateText))
str += " Valid() bool" + "\n"
str += " ValuesAny() []any" + "\n"
str += " ValuesMeta() []EnumMetaValue" + "\n"
str += " VarName() string" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "type StringEnum interface {" + "\n" buffer := bytes.Buffer{}
str += " Enum" + "\n"
str += " String() string" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "type DescriptionEnum interface {" + "\n"
str += " Enum" + "\n"
str += " Description() string" + "\n"
str += "}" + "\n"
str += "\n"
str += "type EnumMetaValue struct {" + "\n"
str += " VarName string `json:\"varName\"`" + "\n"
str += " Value any `json:\"value\"`" + "\n"
str += " Description *string `json:\"description\"`" + "\n"
str += "}" + "\n"
str += "\n"
for _, enumdef := range enums {
hasDescr := langext.ArrAll(enumdef.Values, func(val EnumDefVal) bool { return val.Description != nil })
hasStr := enumdef.Type == "string"
str += "// ================================ " + enumdef.EnumTypeName + " ================================" + "\n"
str += "//" + "\n"
str += "// File: " + enumdef.FileRelative + "\n"
str += "// StringEnum: " + langext.Conditional(hasStr, "true", "false") + "\n"
str += "// DescrEnum: " + langext.Conditional(hasDescr, "true", "false") + "\n"
str += "//" + "\n"
str += "" + "\n"
str += "var __" + enumdef.EnumTypeName + "Values = []" + enumdef.EnumTypeName + "{" + "\n"
for _, v := range enumdef.Values {
str += " " + v.VarName + "," + "\n"
}
str += "}" + "\n"
str += "" + "\n"
if hasDescr {
str += "var __" + enumdef.EnumTypeName + "Descriptions = map[" + enumdef.EnumTypeName + "]string{" + "\n"
for _, v := range enumdef.Values {
str += " " + v.VarName + ": \"" + strings.TrimSpace(*v.Description) + "\"," + "\n"
}
str += "}" + "\n"
str += "" + "\n"
}
str += "var __" + enumdef.EnumTypeName + "Varnames = map[" + enumdef.EnumTypeName + "]string{" + "\n"
for _, v := range enumdef.Values {
str += " " + v.VarName + ": \"" + v.VarName + "\"," + "\n"
}
str += "}" + "\n"
str += "" + "\n"
str += "func (e " + enumdef.EnumTypeName + ") Valid() bool {" + "\n"
str += " return langext.InArray(e, __" + enumdef.EnumTypeName + "Values)" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func (e " + enumdef.EnumTypeName + ") Values() []" + enumdef.EnumTypeName + " {" + "\n"
str += " return __" + enumdef.EnumTypeName + "Values" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func (e " + enumdef.EnumTypeName + ") ValuesAny() []any {" + "\n"
str += " return langext.ArrCastToAny(__" + enumdef.EnumTypeName + "Values)" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func (e " + enumdef.EnumTypeName + ") ValuesMeta() []EnumMetaValue {" + "\n"
str += " return " + enumdef.EnumTypeName + "ValuesMeta()"
str += "}" + "\n"
str += "" + "\n"
if hasStr {
str += "func (e " + enumdef.EnumTypeName + ") String() string {" + "\n"
str += " return string(e)" + "\n"
str += "}" + "\n"
str += "" + "\n"
}
if hasDescr {
str += "func (e " + enumdef.EnumTypeName + ") Description() string {" + "\n"
str += " if d, ok := __" + enumdef.EnumTypeName + "Descriptions[e]; ok {" + "\n"
str += " return d" + "\n"
str += " }" + "\n"
str += " return \"\"" + "\n"
str += "}" + "\n"
str += "" + "\n"
}
str += "func (e " + enumdef.EnumTypeName + ") VarName() string {" + "\n"
str += " if d, ok := __" + enumdef.EnumTypeName + "Varnames[e]; ok {" + "\n"
str += " return d" + "\n"
str += " }" + "\n"
str += " return \"\"" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func (e " + enumdef.EnumTypeName + ") Meta() EnumMetaValue {" + "\n"
if hasDescr {
str += " return EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())}"
} else {
str += " return EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil}"
}
str += "}" + "\n"
str += "" + "\n"
str += "func Parse" + enumdef.EnumTypeName + "(vv string) (" + enumdef.EnumTypeName + ", bool) {" + "\n"
str += " for _, ev := range __" + enumdef.EnumTypeName + "Values {" + "\n"
str += " if string(ev) == vv {" + "\n"
str += " return ev, true" + "\n"
str += " }" + "\n"
str += " }" + "\n"
str += " return \"\", false" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func " + enumdef.EnumTypeName + "Values() []" + enumdef.EnumTypeName + " {" + "\n"
str += " return __" + enumdef.EnumTypeName + "Values" + "\n"
str += "}" + "\n"
str += "" + "\n"
str += "func " + enumdef.EnumTypeName + "ValuesMeta() []EnumMetaValue {" + "\n"
str += " return []EnumMetaValue{" + "\n"
for _, v := range enumdef.Values {
str += " " + v.VarName + ".Meta(),\n"
}
str += " }" + "\n"
str += "}" + "\n"
str += "" + "\n"
err := templ.Execute(&buffer, langext.H{
"PkgName": pkgname,
"Checksum": cs,
"GoextVersion": goext.GoextVersion,
"Enums": enums,
})
if err != nil {
panic(err)
} }
return str return buffer.String()
} }
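
For context (illustrative, not taken from this change): the enum generator picks up declarations that match rexEnumDef and rexEnumValueDef, i.e. a type alias marked `// @enum:type` plus constants of that type, optionally with a trailing description comment. A hypothetical input file:

package models

type JobState string // @enum:type

const (
	JobStateRunning  JobState = "RUNNING"  // job is currently being processed
	JobStateFinished JobState = "FINISHED" // job finished successfully
	JobStateFailed   JobState = "FAILED"   // job aborted with an error
)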


@@ -0,0 +1,97 @@
// Code generated by enum-generate.go DO NOT EDIT.
package {{.PkgName}}
import "gogs.mikescher.com/BlackForestBytes/goext/langext"
import "gogs.mikescher.com/BlackForestBytes/goext/enums"
const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}}
{{range .Enums}}
{{ $hasStr := ( . | hasStr ) }}
{{ $hasDescr := ( . | hasDescr ) }}
// ================================ {{.EnumTypeName}} ================================
//
// File: {{.FileRelative}}
// StringEnum: {{$hasStr | boolToStr}}
// DescrEnum: {{$hasDescr | boolToStr}}
//
var __{{.EnumTypeName}}Values = []{{.EnumTypeName}}{ {{range .Values}}
{{.VarName}}, {{end}}
}
{{if $hasDescr}}
var __{{.EnumTypeName}}Descriptions = map[{{.EnumTypeName}}]string{ {{range .Values}}
{{.VarName}}: "{{.Description | deref | trimSpace}}", {{end}}
}
{{end}}
var __{{.EnumTypeName}}Varnames = map[{{.EnumTypeName}}]string{ {{range .Values}}
{{.VarName}}: "{{.VarName}}", {{end}}
}
func (e {{.EnumTypeName}}) Valid() bool {
return langext.InArray(e, __{{.EnumTypeName}}Values)
}
func (e {{.EnumTypeName}}) Values() []{{.EnumTypeName}} {
return __{{.EnumTypeName}}Values
}
func (e {{.EnumTypeName}}) ValuesAny() []any {
return langext.ArrCastToAny(__{{.EnumTypeName}}Values)
}
func (e {{.EnumTypeName}}) ValuesMeta() []enums.EnumMetaValue {
return {{.EnumTypeName}}ValuesMeta()
}
{{if $hasStr}}
func (e {{.EnumTypeName}}) String() string {
return string(e)
}
{{end}}
{{if $hasDescr}}
func (e {{.EnumTypeName}}) Description() string {
if d, ok := __{{.EnumTypeName}}Descriptions[e]; ok {
return d
}
return ""
}
{{end}}
func (e {{.EnumTypeName}}) VarName() string {
if d, ok := __{{.EnumTypeName}}Varnames[e]; ok {
return d
}
return ""
}
func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue {
{{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}}
}
func Parse{{.EnumTypeName}}(vv string) ({{.EnumTypeName}}, bool) {
for _, ev := range __{{.EnumTypeName}}Values {
if string(ev) == vv {
return ev, true
}
}
return "", false
}
func {{.EnumTypeName}}Values() []{{.EnumTypeName}} {
return __{{.EnumTypeName}}Values
}
func {{.EnumTypeName}}ValuesMeta() []enums.EnumMetaValue {
return []enums.EnumMetaValue{ {{range .Values}}
{{.VarName}}.Meta(), {{end}}
}
}
{{end}}
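
Given an input like the hypothetical JobState above, this template renders roughly the following API into the *_gen.go file (a sketch, not generated output from this commit):

v, ok := ParseJobState("RUNNING") // JobStateRunning, true
_ = v.Valid()                     // true for the three declared values
_ = v.VarName()                   // "JobStateRunning"
_ = v.Description()               // only generated when every value carries a // description
meta := JobStateValuesMeta()      // []enums.EnumMetaValue with VarName / Value / Description
_ = meta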


@@ -1,15 +1,52 @@
 package bfcodegen
 
 import (
+	_ "embed"
+	"fmt"
+	"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
+	"gogs.mikescher.com/BlackForestBytes/goext/langext"
+	"gogs.mikescher.com/BlackForestBytes/goext/tst"
+	"os"
+	"path/filepath"
 	"testing"
+	"time"
 )
 
-func TestApplyEnvOverridesSimple(t *testing.T) {
-
-	err := GenerateEnumSpecs("/home/mike/Code/reiff/badennet/bnet-backend/models", "/home/mike/Code/reiff/badennet/bnet-backend/models/enums_gen.go")
-	if err != nil {
-		t.Error(err)
-		t.Fail()
-	}
+//go:embed _test_example.tgz
+var EnumExampleModels []byte
+
+func TestGenerateEnumSpecs(t *testing.T) {
+	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz")
+	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())
+
+	err := os.WriteFile(tmpFile, EnumExampleModels, 0o777)
+	tst.AssertNoErr(t, err)
+	t.Cleanup(func() { _ = os.Remove(tmpFile) })
+
+	err = os.Mkdir(tmpDir, 0o777)
+	tst.AssertNoErr(t, err)
+	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) })
+
+	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
+	tst.AssertNoErr(t, err)
+
+	err = GenerateEnumSpecs(tmpDir, tmpDir+"/enums_gen.go")
+	tst.AssertNoErr(t, err)
+
+	err = GenerateEnumSpecs(tmpDir, tmpDir+"/enums_gen.go")
+	tst.AssertNoErr(t, err)
+
+	fmt.Println()
+	fmt.Println()
+	fmt.Println()
+	fmt.Println("=====================================================================================================")
+	fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/enums_gen.go"))(t)))
+	fmt.Println("=====================================================================================================")
+	fmt.Println()
+	fmt.Println()
+	fmt.Println()
 }

183
bfcodegen/id-generate.go Normal file

@@ -0,0 +1,183 @@
package bfcodegen
import (
"bytes"
_ "embed"
"errors"
"fmt"
"go/format"
"gogs.mikescher.com/BlackForestBytes/goext"
"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"io"
"os"
"path"
"path/filepath"
"regexp"
"strings"
"text/template"
)
type IDDef struct {
File string
FileRelative string
Name string
}
var rexIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`))
var rexIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@id:type).*$`))
var rexIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))
//go:embed id-generate.template
var templateIDGenerateText string
func GenerateIDSpecs(sourceDir string, destFile string) error {
files, err := os.ReadDir(sourceDir)
if err != nil {
return err
}
oldChecksum := "N/A"
if _, err := os.Stat(destFile); !os.IsNotExist(err) {
content, err := os.ReadFile(destFile)
if err != nil {
return err
}
if m, ok := rexIDChecksumConst.MatchFirst(string(content)); ok {
oldChecksum = m.GroupByName("cs").Value()
}
}
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") })
langext.SortBy(files, func(v os.DirEntry) string { return v.Name() })
newChecksumStr := goext.GoextVersion
for _, f := range files {
content, err := os.ReadFile(path.Join(sourceDir, f.Name()))
if err != nil {
return err
}
newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content)
}
newChecksum := cryptext.BytesSha256([]byte(newChecksumStr))
if newChecksum != oldChecksum {
fmt.Printf("[IDGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum)
} else {
fmt.Printf("[IDGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum)
return nil
}
allIDs := make([]IDDef, 0)
pkgname := ""
for _, f := range files {
fmt.Printf("========= %s =========\n\n", f.Name())
fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name()))
if err != nil {
return err
}
fmt.Printf("\n")
allIDs = append(allIDs, fileIDs...)
if pn != "" {
pkgname = pn
}
}
if pkgname == "" {
return errors.New("no package name found in any file")
}
fdata, err := format.Source([]byte(fmtIDOutput(newChecksum, allIDs, pkgname)))
if err != nil {
return err
}
err = os.WriteFile(destFile, fdata, 0o755)
if err != nil {
return err
}
return nil
}
func processIDFile(basedir string, fn string) ([]IDDef, string, error) {
file, err := os.Open(fn)
if err != nil {
return nil, "", err
}
defer func() { _ = file.Close() }()
bin, err := io.ReadAll(file)
if err != nil {
return nil, "", err
}
lines := strings.Split(string(bin), "\n")
ids := make([]IDDef, 0)
pkgname := ""
for i, line := range lines {
if i == 0 && strings.HasPrefix(line, "// Code generated by") {
break
}
if match, ok := rexIDPackage.MatchFirst(line); i == 0 && ok {
pkgname = match.GroupByName("name").Value()
continue
}
if match, ok := rexIDDef.MatchFirst(line); ok {
rfp, err := filepath.Rel(basedir, fn)
if err != nil {
return nil, "", err
}
def := IDDef{
File: fn,
FileRelative: rfp,
Name: match.GroupByName("name").Value(),
}
fmt.Printf("Found ID definition { '%s' }\n", def.Name)
ids = append(ids, def)
}
}
return ids, pkgname, nil
}
func fmtIDOutput(cs string, ids []IDDef, pkgname string) string {
templ := template.Must(template.New("id-generate").Parse(templateIDGenerateText))
buffer := bytes.Buffer{}
anyDef := langext.ArrFirstOrNil(ids, func(def IDDef) bool { return def.Name == "AnyID" || def.Name == "AnyId" })
err := templ.Execute(&buffer, langext.H{
"PkgName": pkgname,
"Checksum": cs,
"GoextVersion": goext.GoextVersion,
"IDs": ids,
"AnyDef": anyDef,
})
if err != nil {
panic(err)
}
return buffer.String()
}
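
Analogous to the other generators (again illustrative only): rexIDDef matches plain ObjectId-backed ID types marked with `// @id:type`, and declaring an AnyID (or AnyId) type additionally enables the AsAny() helper in the template that follows.

package models

type AnyID string  // @id:type
type UserID string // @id:type

// e.g. invoked from a go:generate helper or a test, as in id-generate_test.go below:
//   err := bfcodegen.GenerateIDSpecs("./models", "./models/id_gen.go")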


@@ -0,0 +1,47 @@
// Code generated by id-generate.go DO NOT EDIT.
package {{.PkgName}}
import "go.mongodb.org/mongo-driver/bson"
import "go.mongodb.org/mongo-driver/bson/bsontype"
import "go.mongodb.org/mongo-driver/bson/primitive"
import "gogs.mikescher.com/BlackForestBytes/goext/exerr"
const ChecksumIDGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}}
{{range .IDs}}
// ================================ {{.Name}} ({{.FileRelative}}) ================================
func (i {{.Name}}) MarshalBSONValue() (bsontype.Type, []byte, error) {
if objId, err := primitive.ObjectIDFromHex(string(i)); err == nil {
return bson.MarshalValue(objId)
} else {
return 0, nil, exerr.New(exerr.TypeMarshalEntityID, "Failed to marshal {{.Name}}("+i.String()+") to ObjectId").Str("value", string(i)).Type("type", i).Build()
}
}
func (i {{.Name}}) String() string {
return string(i)
}
func (i {{.Name}}) ObjID() (primitive.ObjectID, error) {
return primitive.ObjectIDFromHex(string(i))
}
func (i {{.Name}}) Valid() bool {
_, err := primitive.ObjectIDFromHex(string(i))
return err == nil
}
{{if ne $.AnyDef nil}}
func (i {{.Name}}) AsAny() {{$.AnyDef.Name}} {
return {{$.AnyDef.Name}}(i)
}
{{end}}
func New{{.Name}}() {{.Name}} {
return {{.Name}}(primitive.NewObjectID().Hex())
}
{{end}}


@@ -0,0 +1,52 @@
package bfcodegen
import (
_ "embed"
"fmt"
"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"os"
"path/filepath"
"testing"
"time"
)
//go:embed _test_example.tgz
var IDExampleModels []byte
func TestGenerateIDSpecs(t *testing.T) {
tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz")
tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())
err := os.WriteFile(tmpFile, IDExampleModels, 0o777)
tst.AssertNoErr(t, err)
t.Cleanup(func() { _ = os.Remove(tmpFile) })
err = os.Mkdir(tmpDir, 0o777)
tst.AssertNoErr(t, err)
t.Cleanup(func() { _ = os.RemoveAll(tmpFile) })
_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
tst.AssertNoErr(t, err)
err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go")
tst.AssertNoErr(t, err)
err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go")
tst.AssertNoErr(t, err)
fmt.Println()
fmt.Println()
fmt.Println()
fmt.Println("=====================================================================================================")
fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/id_gen.go"))(t)))
fmt.Println("=====================================================================================================")
fmt.Println()
fmt.Println()
fmt.Println()
}


@@ -14,6 +14,7 @@ type CommandRunner struct {
 	listener         []CommandListener
 	enforceExitCodes *[]int
 	enforceNoTimeout bool
+	enforceNoStderr  bool
 }
 
 func Runner(program string) *CommandRunner {
@@ -25,6 +26,7 @@ func Runner(program string) *CommandRunner {
 		listener:         make([]CommandListener, 0),
 		enforceExitCodes: nil,
 		enforceNoTimeout: false,
+		enforceNoStderr:  false,
 	}
 }
 
@@ -73,6 +75,11 @@ func (r *CommandRunner) FailOnTimeout() *CommandRunner {
 	return r
 }
 
+func (r *CommandRunner) FailOnStderr() *CommandRunner {
+	r.enforceNoStderr = true
+	return r
+}
+
 func (r *CommandRunner) Listen(lstr CommandListener) *CommandRunner {
 	r.listener = append(r.listener, lstr)
 	return r


@@ -11,6 +11,7 @@ import (
var ErrExitCode = errors.New("process exited with an unexpected exitcode") var ErrExitCode = errors.New("process exited with an unexpected exitcode")
var ErrTimeout = errors.New("process did not exit after the specified timeout") var ErrTimeout = errors.New("process did not exit after the specified timeout")
var ErrStderrPrint = errors.New("process did print to stderr stream")
type CommandResult struct { type CommandResult struct {
StdOut string StdOut string
@@ -53,12 +54,27 @@ func run(opt CommandRunner) (CommandResult, error) {
err error err error
} }
stderrFailChan := make(chan bool)
outputChan := make(chan resultObj) outputChan := make(chan resultObj)
go func() { go func() {
// we need to first fully read the pipes and then call Wait // we need to first fully read the pipes and then call Wait
// see https://pkg.go.dev/os/exec#Cmd.StdoutPipe // see https://pkg.go.dev/os/exec#Cmd.StdoutPipe
stdout, stderr, stdcombined, err := preader.Read(opt.listener) listener := make([]CommandListener, 0)
listener = append(listener, opt.listener...)
if opt.enforceNoStderr {
listener = append(listener, genericCommandListener{
_readRawStderr: langext.Ptr(func(v []byte) {
if len(v) > 0 {
stderrFailChan <- true
}
}),
})
}
stdout, stderr, stdcombined, err := preader.Read(listener)
if err != nil { if err != nil {
outputChan <- resultObj{stdout, stderr, stdcombined, err} outputChan <- resultObj{stdout, stderr, stdcombined, err}
_ = cmd.Process.Kill() _ = cmd.Process.Kill()
@@ -115,8 +131,34 @@ func run(opt CommandRunner) (CommandResult, error) {
return res, nil return res, nil
} }
case <-stderrFailChan:
_ = cmd.Process.Kill()
if fallback, ok := syncext.ReadChannelWithTimeout(outputChan, 32*time.Millisecond); ok {
// most of the time the cmd.Process.Kill() should also have finished the pipereader
// and we can at least return the already collected stdout, stderr, etc
res := CommandResult{
StdOut: fallback.stdout,
StdErr: fallback.stderr,
StdCombined: fallback.stdcombined,
ExitCode: -1,
CommandTimedOut: false,
}
return res, ErrStderrPrint
} else {
res := CommandResult{
StdOut: "",
StdErr: "",
StdCombined: "",
ExitCode: -1,
CommandTimedOut: false,
}
return res, ErrStderrPrint
}
case outobj := <-outputChan: case outobj := <-outputChan:
if exiterr, ok := outobj.err.(*exec.ExitError); ok { var exiterr *exec.ExitError
if errors.As(outobj.err, &exiterr) {
excode := exiterr.ExitCode() excode := exiterr.ExitCode()
for _, lstr := range opt.listener { for _, lstr := range opt.listener {
lstr.Finished(excode) lstr.Finished(excode)
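The stderr-fail branch above drains outputChan via syncext.ReadChannelWithTimeout so the killed process can still report partial output; a minimal sketch of the assumed helper semantics (not the actual goext implementation):
func readChannelWithTimeout[T any](ch chan T, timeout time.Duration) (T, bool) {
	select {
	case v := <-ch:
		return v, true // pipe-reader finished in time, use its collected output
	case <-time.After(timeout):
		var zero T
		return zero, false // give up and return empty results instead
	}
}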

View File

@@ -1,6 +1,7 @@
package cmdext package cmdext
import ( import (
"errors"
"fmt" "fmt"
"testing" "testing"
"time" "time"
@@ -32,7 +33,7 @@ func TestStdout(t *testing.T) {
func TestStderr(t *testing.T) { func TestStderr(t *testing.T) {
res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").Run() res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").Run()
if err != nil { if err != nil {
t.Errorf("%v", err) t.Errorf("%v", err)
} }
@@ -55,7 +56,7 @@ func TestStderr(t *testing.T) {
} }
func TestStdcombined(t *testing.T) { func TestStdcombined(t *testing.T) {
res1, err := Runner("python"). res1, err := Runner("python3").
Arg("-c"). Arg("-c").
Arg("import sys; import time; print(\"1\", file=sys.stderr, flush=True); time.sleep(0.1); print(\"2\", file=sys.stdout, flush=True); time.sleep(0.1); print(\"3\", file=sys.stderr, flush=True)"). Arg("import sys; import time; print(\"1\", file=sys.stderr, flush=True); time.sleep(0.1); print(\"2\", file=sys.stdout, flush=True); time.sleep(0.1); print(\"3\", file=sys.stderr, flush=True)").
Run() Run()
@@ -81,7 +82,7 @@ func TestStdcombined(t *testing.T) {
} }
func TestPartialRead(t *testing.T) { func TestPartialRead(t *testing.T) {
res1, err := Runner("python"). res1, err := Runner("python3").
Arg("-c"). Arg("-c").
Arg("import sys; import time; print(\"first message\", flush=True); time.sleep(5); print(\"cant see me\", flush=True);"). Arg("import sys; import time; print(\"first message\", flush=True); time.sleep(5); print(\"cant see me\", flush=True);").
Timeout(100 * time.Millisecond). Timeout(100 * time.Millisecond).
@@ -105,7 +106,7 @@ func TestPartialRead(t *testing.T) {
} }
func TestPartialReadStderr(t *testing.T) { func TestPartialReadStderr(t *testing.T) {
res1, err := Runner("python"). res1, err := Runner("python3").
Arg("-c"). Arg("-c").
Arg("import sys; import time; print(\"first message\", file=sys.stderr, flush=True); time.sleep(5); print(\"cant see me\", file=sys.stderr, flush=True);"). Arg("import sys; import time; print(\"first message\", file=sys.stderr, flush=True); time.sleep(5); print(\"cant see me\", file=sys.stderr, flush=True);").
Timeout(100 * time.Millisecond). Timeout(100 * time.Millisecond).
@@ -130,7 +131,7 @@ func TestPartialReadStderr(t *testing.T) {
func TestReadUnflushedStdout(t *testing.T) { func TestReadUnflushedStdout(t *testing.T) {
res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stdout, end='')").Run() res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stdout, end='')").Run()
if err != nil { if err != nil {
t.Errorf("%v", err) t.Errorf("%v", err)
} }
@@ -154,7 +155,7 @@ func TestReadUnflushedStdout(t *testing.T) {
func TestReadUnflushedStderr(t *testing.T) { func TestReadUnflushedStderr(t *testing.T) {
res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stderr, end='')").Run() res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stderr, end='')").Run()
if err != nil { if err != nil {
t.Errorf("%v", err) t.Errorf("%v", err)
} }
@@ -179,7 +180,7 @@ func TestReadUnflushedStderr(t *testing.T) {
func TestPartialReadUnflushed(t *testing.T) { func TestPartialReadUnflushed(t *testing.T) {
t.SkipNow() t.SkipNow()
res1, err := Runner("python"). res1, err := Runner("python3").
Arg("-c"). Arg("-c").
Arg("import sys; import time; print(\"first message\", end=''); time.sleep(5); print(\"cant see me\", end='');"). Arg("import sys; import time; print(\"first message\", end=''); time.sleep(5); print(\"cant see me\", end='');").
Timeout(100 * time.Millisecond). Timeout(100 * time.Millisecond).
@@ -205,7 +206,7 @@ func TestPartialReadUnflushed(t *testing.T) {
func TestPartialReadUnflushedStderr(t *testing.T) { func TestPartialReadUnflushedStderr(t *testing.T) {
t.SkipNow() t.SkipNow()
res1, err := Runner("python"). res1, err := Runner("python3").
Arg("-c"). Arg("-c").
Arg("import sys; import time; print(\"first message\", file=sys.stderr, end=''); time.sleep(5); print(\"cant see me\", file=sys.stderr, end='');"). Arg("import sys; import time; print(\"first message\", file=sys.stderr, end=''); time.sleep(5); print(\"cant see me\", file=sys.stderr, end='');").
Timeout(100 * time.Millisecond). Timeout(100 * time.Millisecond).
@@ -230,7 +231,7 @@ func TestPartialReadUnflushedStderr(t *testing.T) {
func TestListener(t *testing.T) { func TestListener(t *testing.T) {
res1, err := Runner("python"). res1, err := Runner("python3").
Arg("-c"). Arg("-c").
Arg("import sys;" + Arg("import sys;" +
"import time;" + "import time;" +
@@ -263,7 +264,7 @@ func TestListener(t *testing.T) {
func TestLongStdout(t *testing.T) { func TestLongStdout(t *testing.T) {
res1, err := Runner("python"). res1, err := Runner("python3").
Arg("-c"). Arg("-c").
Arg("import sys; import time; print(\"X\" * 125001 + \"\\n\"); print(\"Y\" * 125001 + \"\\n\"); print(\"Z\" * 125001 + \"\\n\");"). Arg("import sys; import time; print(\"X\" * 125001 + \"\\n\"); print(\"Y\" * 125001 + \"\\n\"); print(\"Z\" * 125001 + \"\\n\");").
Timeout(5000 * time.Millisecond). Timeout(5000 * time.Millisecond).
@@ -289,16 +290,40 @@ func TestLongStdout(t *testing.T) {
func TestFailOnTimeout(t *testing.T) { func TestFailOnTimeout(t *testing.T) {
_, err := Runner("sleep").Arg("2").Timeout(200 * time.Millisecond).FailOnTimeout().Run() _, err := Runner("sleep").Arg("2").Timeout(200 * time.Millisecond).FailOnTimeout().Run()
if err != ErrTimeout { if !errors.Is(err, ErrTimeout) {
t.Errorf("wrong err := %v", err) t.Errorf("wrong err := %v", err)
} }
} }
func TestFailOnStderr(t *testing.T) {
res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").FailOnStderr().Run()
if err == nil {
t.Errorf("no err")
}
if res1.CommandTimedOut {
t.Errorf("Timeout")
}
if res1.ExitCode != -1 {
t.Errorf("res1.ExitCode == %v", res1.ExitCode)
}
if res1.StdErr != "error" {
t.Errorf("res1.StdErr == '%v'", res1.StdErr)
}
if res1.StdOut != "" {
t.Errorf("res1.StdOut == '%v'", res1.StdOut)
}
if res1.StdCombined != "error\n" {
t.Errorf("res1.StdCombined == '%v'", res1.StdCombined)
}
}
func TestFailOnExitcode(t *testing.T) { func TestFailOnExitcode(t *testing.T) {
_, err := Runner("false").Timeout(200 * time.Millisecond).FailOnExitCode().Run() _, err := Runner("false").Timeout(200 * time.Millisecond).FailOnExitCode().Run()
if err != ErrExitCode { if !errors.Is(err, ErrExitCode) {
t.Errorf("wrong err := %v", err) t.Errorf("wrong err := %v", err)
} }

View File

@@ -32,8 +32,8 @@ func (pr *pipeReader) Read(listener []CommandListener) (string, string, string,
stdout := "" stdout := ""
go func() { go func() {
buf := make([]byte, 128) buf := make([]byte, 128)
for true { for {
n, out := pr.stdout.Read(buf) n, err := pr.stdout.Read(buf)
if n > 0 { if n > 0 {
txt := string(buf[:n]) txt := string(buf[:n])
stdout += txt stdout += txt
@@ -42,11 +42,11 @@ func (pr *pipeReader) Read(listener []CommandListener) (string, string, string,
lstr.ReadRawStdout(buf[:n]) lstr.ReadRawStdout(buf[:n])
} }
} }
if out == io.EOF { if err == io.EOF {
break break
} }
if out != nil { if err != nil {
errch <- out errch <- err
break break
} }
} }
@@ -61,7 +61,7 @@ func (pr *pipeReader) Read(listener []CommandListener) (string, string, string,
stderr := "" stderr := ""
go func() { go func() {
buf := make([]byte, 128) buf := make([]byte, 128)
for true { for {
n, err := pr.stderr.Read(buf) n, err := pr.stderr.Read(buf)
if n > 0 { if n > 0 {

View File

@@ -41,12 +41,12 @@ func processEnvOverrides(rval reflect.Value, delim string, prefix string) error
continue continue
} }
if rvfield.Kind() == reflect.Struct { envkey, found := rsfield.Tag.Lookup("env")
if !found || envkey == "-" {
continue
}
envkey, found := rsfield.Tag.Lookup("env") if rvfield.Kind() == reflect.Struct && rvfield.Type() != reflect.TypeOf(time.UnixMilli(0)) {
if !found || envkey == "-" {
continue
}
subPrefix := prefix subPrefix := prefix
if envkey != "" { if envkey != "" {
@@ -57,10 +57,7 @@ func processEnvOverrides(rval reflect.Value, delim string, prefix string) error
if err != nil { if err != nil {
return err return err
} }
}
envkey := rsfield.Tag.Get("env")
if envkey == "" || envkey == "-" {
continue continue
} }
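Hedged example of the struct-tag behaviour this change enables (field names are illustrative, assuming "_" as delimiter):
type ServerConfig struct {
	Port int `env:"PORT"` // overridden by SERVER_PORT
}

type Config struct {
	Server  ServerConfig `env:"SERVER"`  // structs are recursed into with the prefix "SERVER"
	Started time.Time    `env:"STARTED"` // time.Time is now treated as a leaf value, not recursed
	Secret  string       `env:"-"`       // explicitly skipped
}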

View File

@@ -66,7 +66,6 @@ func (ph PassHash) Data() (_version int, _seed []byte, _payload []byte, _totp bo
return int(version), nil, payload, false, nil, true return int(version), nil, payload, false, nil, true
} }
//
if version == 2 { if version == 2 {
if len(split) != 3 { if len(split) != 3 {
return -1, nil, nil, false, nil, false return -1, nil, nil, false, nil, false

View File

@@ -4,6 +4,10 @@ import (
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
) )
type RawFilter interface {
FilterQuery() mongo.Pipeline
}
type Filter interface { type Filter interface {
FilterQuery() mongo.Pipeline FilterQuery() mongo.Pipeline
Pagination() (string, SortDirection, string, SortDirection) Pagination() (string, SortDirection, string, SortDirection)
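A hypothetical implementation of the new RawFilter interface (filter name and match condition are made up):
type activeOnlyFilter struct{}

func (activeOnlyFilter) FilterQuery() mongo.Pipeline {
	return mongo.Pipeline{
		bson.D{{Key: "$match", Value: bson.D{{Key: "active", Value: true}}}},
	}
}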

View File

@@ -7,6 +7,9 @@ type SyncSet[TData comparable] struct {
lock sync.Mutex lock sync.Mutex
} }
// Add adds `value` to the set
// returns true if the value was actually inserted
// returns false if the value already existed
func (s *SyncSet[TData]) Add(value TData) bool { func (s *SyncSet[TData]) Add(value TData) bool {
s.lock.Lock() s.lock.Lock()
defer s.lock.Unlock() defer s.lock.Unlock()
@@ -15,10 +18,10 @@ func (s *SyncSet[TData]) Add(value TData) bool {
s.data = make(map[TData]bool) s.data = make(map[TData]bool)
} }
_, ok := s.data[value] _, existsInPreState := s.data[value]
s.data[value] = true s.data[value] = true
return !ok return !existsInPreState
} }
func (s *SyncSet[TData]) AddAll(values []TData) { func (s *SyncSet[TData]) AddAll(values []TData) {
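Example of the documented Add semantics:
var seen dataext.SyncSet[string]
fmt.Println(seen.Add("job-42")) // true  - value was actually inserted
fmt.Println(seen.Add("job-42")) // false - value already existed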

170
dataext/tuple.go Normal file
View File

@@ -0,0 +1,170 @@
package dataext
type ValueGroup interface {
TupleLength() int
TupleValues() []any
}
// ----------------------------------------------------------------------------
type Single[T1 any] struct {
V1 T1
}
func (s Single[T1]) TupleLength() int {
return 1
}
func (s Single[T1]) TupleValues() []any {
return []any{s.V1}
}
// ----------------------------------------------------------------------------
type Tuple[T1 any, T2 any] struct {
V1 T1
V2 T2
}
func (t Tuple[T1, T2]) TupleLength() int {
return 2
}
func (t Tuple[T1, T2]) TupleValues() []any {
return []any{t.V1, t.V2}
}
// ----------------------------------------------------------------------------
type Triple[T1 any, T2 any, T3 any] struct {
V1 T1
V2 T2
V3 T3
}
func (t Triple[T1, T2, T3]) TupleLength() int {
return 3
}
func (t Triple[T1, T2, T3]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3}
}
// ----------------------------------------------------------------------------
type Quadruple[T1 any, T2 any, T3 any, T4 any] struct {
V1 T1
V2 T2
V3 T3
V4 T4
}
func (t Quadruple[T1, T2, T3, T4]) TupleLength() int {
return 4
}
func (t Quadruple[T1, T2, T3, T4]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4}
}
// ----------------------------------------------------------------------------
type Quintuple[T1 any, T2 any, T3 any, T4 any, T5 any] struct {
V1 T1
V2 T2
V3 T3
V4 T4
V5 T5
}
func (t Quintuple[T1, T2, T3, T4, T5]) TupleLength() int {
return 5
}
func (t Quintuple[T1, T2, T3, T4, T5]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5}
}
// ----------------------------------------------------------------------------
type Sextuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any] struct {
V1 T1
V2 T2
V3 T3
V4 T4
V5 T5
V6 T6
}
func (t Sextuple[T1, T2, T3, T4, T5, T6]) TupleLength() int {
return 6
}
func (t Sextuple[T1, T2, T3, T4, T5, T6]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6}
}
// ----------------------------------------------------------------------------
type Septuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any] struct {
V1 T1
V2 T2
V3 T3
V4 T4
V5 T5
V6 T6
V7 T7
}
func (t Septuple[T1, T2, T3, T4, T5, T6, T7]) TupleLength() int {
return 7
}
func (t Septuple[T1, T2, T3, T4, T5, T6, T7]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7}
}
// ----------------------------------------------------------------------------
type Octuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any] struct {
V1 T1
V2 T2
V3 T3
V4 T4
V5 T5
V6 T6
V7 T7
V8 T8
}
func (t Octuple[T1, T2, T3, T4, T5, T6, T7, T8]) TupleLength() int {
return 8
}
func (t Octuple[T1, T2, T3, T4, T5, T6, T7, T8]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8}
}
// ----------------------------------------------------------------------------
type Nonuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any] struct {
V1 T1
V2 T2
V3 T3
V4 T4
V5 T5
V6 T6
V7 T7
V8 T8
V9 T9
}
func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleLength() int {
return 9
}
func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8, t.V9}
}
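Small usage sketch of the new tuple types:
pair := dataext.Tuple[string, int]{V1: "answer", V2: 42}
var vg dataext.ValueGroup = pair
fmt.Println(vg.TupleLength(), vg.TupleValues()) // 2 [answer 42]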

24
enums/enum.go Normal file
View File

@@ -0,0 +1,24 @@
package enums
type Enum interface {
Valid() bool
ValuesAny() []any
ValuesMeta() []EnumMetaValue
VarName() string
}
type StringEnum interface {
Enum
String() string
}
type DescriptionEnum interface {
Enum
Description() string
}
type EnumMetaValue struct {
VarName string `json:"varName"`
Value any `json:"value"`
Description *string `json:"description"`
}
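A hypothetical StringEnum implementation (type and value names are illustrative):
type Color string

const (
	ColorRed  Color = "red"
	ColorBlue Color = "blue"
)

func (c Color) Valid() bool      { return c == ColorRed || c == ColorBlue }
func (c Color) ValuesAny() []any { return []any{ColorRed, ColorBlue} }
func (c Color) ValuesMeta() []enums.EnumMetaValue {
	return []enums.EnumMetaValue{
		{VarName: "ColorRed", Value: ColorRed, Description: nil},
		{VarName: "ColorBlue", Value: ColorBlue, Description: nil},
	}
}
func (c Color) VarName() string {
	switch c {
	case ColorRed:
		return "ColorRed"
	case ColorBlue:
		return "ColorBlue"
	default:
		return ""
	}
}
func (c Color) String() string { return string(c) }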

View File

@@ -9,6 +9,7 @@ import (
"github.com/rs/zerolog" "github.com/rs/zerolog"
"go.mongodb.org/mongo-driver/bson/primitive" "go.mongodb.org/mongo-driver/bson/primitive"
"gogs.mikescher.com/BlackForestBytes/goext/dataext" "gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/enums"
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
"net/http" "net/http"
"os" "os"
@@ -80,6 +81,10 @@ func New(t ErrorType, msg string) *Builder {
} }
func Wrap(err error, msg string) *Builder { func Wrap(err error, msg string) *Builder {
if err == nil {
return &Builder{errorData: newExErr(CatSystem, TypeInternal, msg)} // prevent NPE if we call Wrap with err==nil
}
if !pkgconfig.RecursiveErrors { if !pkgconfig.RecursiveErrors {
v := FromError(err) v := FromError(err)
v.Message = msg v.Message = msg
@@ -269,6 +274,18 @@ func (b *Builder) Any(key string, val any) *Builder {
return b.addMeta(key, MDTAny, newAnyWrap(val)) return b.addMeta(key, MDTAny, newAnyWrap(val))
} }
func (b *Builder) Stringer(key string, val fmt.Stringer) *Builder {
if langext.IsNil(val) {
return b.addMeta(key, MDTString, "(!nil)")
} else {
return b.addMeta(key, MDTString, val.String())
}
}
func (b *Builder) Enum(key string, val enums.Enum) *Builder {
return b.addMeta(key, MDTEnum, newEnumWrap(val))
}
func (b *Builder) Stack() *Builder { func (b *Builder) Stack() *Builder {
return b.addMeta("@Stack", MDTString, string(debug.Stack())) return b.addMeta("@Stack", MDTString, string(debug.Stack()))
} }
@@ -289,7 +306,7 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request)
} }
b.Str("gin.method", req.Method) b.Str("gin.method", req.Method)
b.Str("gin.path", g.FullPath()) b.Str("gin.path", g.FullPath())
b.Str("gin.header", formatHeader(g.Request.Header)) b.Strs("gin.header", extractHeader(g.Request.Header))
if req.URL != nil { if req.URL != nil {
b.Str("gin.url", req.URL.String()) b.Str("gin.url", req.URL.String())
} }
@@ -305,22 +322,38 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request)
if ctxVal := g.GetString("reqid"); ctxVal != "" { if ctxVal := g.GetString("reqid"); ctxVal != "" {
b.Str("gin.context.reqid", ctxVal) b.Str("gin.context.reqid", ctxVal)
} }
if req.Method != "GET" && req.Body != nil && req.Header.Get("Content-Type") == "application/json" { if req.Method != "GET" && req.Body != nil {
if brc, ok := req.Body.(dataext.BufferedReadCloser); ok {
if bin, err := brc.BufferedAll(); err == nil { if req.Header.Get("Content-Type") == "application/json" {
if len(bin) < 16*1024 { if brc, ok := req.Body.(dataext.BufferedReadCloser); ok {
var prettyJSON bytes.Buffer if bin, err := brc.BufferedAll(); err == nil {
err = json.Indent(&prettyJSON, bin, "", " ") if len(bin) < 16*1024 {
if err == nil { var prettyJSON bytes.Buffer
b.Str("gin.body", string(prettyJSON.Bytes())) err = json.Indent(&prettyJSON, bin, "", " ")
if err == nil {
b.Str("gin.body", string(prettyJSON.Bytes()))
} else {
b.Bytes("gin.body", bin)
}
} else { } else {
b.Bytes("gin.body", bin) b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
} }
} else {
b.Str("gin.body", fmt.Sprintf("[[%v bytes]]", len(bin)))
} }
} }
} }
if req.Header.Get("Content-Type") == "multipart/form-data" || req.Header.Get("Content-Type") == "x-www-form-urlencoded" {
if brc, ok := req.Body.(dataext.BufferedReadCloser); ok {
if bin, err := brc.BufferedAll(); err == nil {
if len(bin) < 16*1024 {
b.Bytes("gin.body", bin)
} else {
b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
}
}
}
}
} }
b.containsGinData = true b.containsGinData = true
@@ -350,6 +383,20 @@ func formatHeader(header map[string][]string) string {
return r return r
} }
func extractHeader(header map[string][]string) []string {
r := make([]string, 0, len(header))
for k, v := range header {
for _, hval := range v {
value := hval
value = strings.ReplaceAll(value, "\n", "\\n")
value = strings.ReplaceAll(value, "\r", "\\r")
value = strings.ReplaceAll(value, "\t", "\\t")
r = append(r, k+": "+value)
}
}
return r
}
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
// Build creates a new error, ready to pass up the stack // Build creates a new error, ready to pass up the stack
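Hedged sketch of the new builder helpers in use (dbErr, userID, req and status are placeholders):
err := exerr.Wrap(dbErr, "failed to update user").
	Str("userId", userID).
	Stringer("requestUrl", req.URL). // nil-safe: a nil Stringer is logged as "(!nil)"
	Enum("accountStatus", status).   // assumes status implements enums.Enum
	Build()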

View File

@@ -27,6 +27,7 @@ func FromError(err error) *ExErr {
StatusCode: nil, StatusCode: nil,
Message: err.Error(), Message: err.Error(),
WrappedErrType: fmt.Sprintf("%T", err), WrappedErrType: fmt.Sprintf("%T", err),
WrappedErr: err,
Caller: "", Caller: "",
OriginalError: nil, OriginalError: nil,
Meta: getForeignMeta(err), Meta: getForeignMeta(err),
@@ -43,6 +44,7 @@ func newExErr(cat ErrorCategory, errtype ErrorType, msg string) *ExErr {
StatusCode: nil, StatusCode: nil,
Message: msg, Message: msg,
WrappedErrType: "", WrappedErrType: "",
WrappedErr: nil,
Caller: callername(2), Caller: callername(2),
OriginalError: nil, OriginalError: nil,
Meta: make(map[string]MetaValue), Meta: make(map[string]MetaValue),
@@ -59,6 +61,7 @@ func wrapExErr(e *ExErr, msg string, cat ErrorCategory, stacktraceskip int) *ExE
StatusCode: e.StatusCode, StatusCode: e.StatusCode,
Message: msg, Message: msg,
WrappedErrType: "", WrappedErrType: "",
WrappedErr: nil,
Caller: callername(1 + stacktraceskip), Caller: callername(1 + stacktraceskip),
OriginalError: e, OriginalError: e,
Meta: make(map[string]MetaValue), Meta: make(map[string]MetaValue),

View File

@@ -1,6 +1,7 @@
package exerr package exerr
import ( import (
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
) )
@@ -37,24 +38,40 @@ type ErrorType struct {
//goland:noinspection GoUnusedGlobalVariable //goland:noinspection GoUnusedGlobalVariable
var ( var (
TypeInternal = ErrorType{"INTERNAL_ERROR", langext.Ptr(500)} TypeInternal = NewType("INTERNAL_ERROR", langext.Ptr(500))
TypePanic = ErrorType{"PANIC", langext.Ptr(500)} TypePanic = NewType("PANIC", langext.Ptr(500))
TypeNotImplemented = ErrorType{"NOT_IMPLEMENTED", langext.Ptr(500)} TypeNotImplemented = NewType("NOT_IMPLEMENTED", langext.Ptr(500))
TypeWrap = ErrorType{"Wrap", nil} TypeMongoQuery = NewType("MONGO_QUERY", langext.Ptr(500))
TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500))
TypeMongoFilter = NewType("MONGO_FILTER", langext.Ptr(500))
TypeMongoReflection = NewType("MONGO_REFLECTION", langext.Ptr(500))
TypeBindFailURI = ErrorType{"BINDFAIL_URI", langext.Ptr(400)} TypeWrap = NewType("Wrap", nil)
TypeBindFailQuery = ErrorType{"BINDFAIL_QUERY", langext.Ptr(400)}
TypeBindFailJSON = ErrorType{"BINDFAIL_JSON", langext.Ptr(400)}
TypeBindFailFormData = ErrorType{"BINDFAIL_FORMDATA", langext.Ptr(400)}
TypeUnauthorized = ErrorType{"UNAUTHORIZED", langext.Ptr(401)} TypeBindFailURI = NewType("BINDFAIL_URI", langext.Ptr(400))
TypeAuthFailed = ErrorType{"AUTH_FAILED", langext.Ptr(401)} TypeBindFailQuery = NewType("BINDFAIL_QUERY", langext.Ptr(400))
TypeBindFailJSON = NewType("BINDFAIL_JSON", langext.Ptr(400))
TypeBindFailFormData = NewType("BINDFAIL_FORMDATA", langext.Ptr(400))
TypeBindFailHeader = NewType("BINDFAIL_HEADER", langext.Ptr(400))
// other values come from pkgconfig TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400))
TypeInvalidCSID = NewType("INVALID_CSID", langext.Ptr(400))
TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401))
TypeAuthFailed = NewType("AUTH_FAILED", langext.Ptr(401))
// other values come from the used package
) )
var registeredTypes = dataext.SyncSet[string]{}
func NewType(key string, defStatusCode *int) ErrorType { func NewType(key string, defStatusCode *int) ErrorType {
insertOkay := registeredTypes.Add(key)
if !insertOkay {
panic("Cannot register same ErrType ('" + key + "') more than once")
}
return ErrorType{key, defStatusCode} return ErrorType{key, defStatusCode}
} }

View File

@@ -6,32 +6,35 @@ import (
) )
type ErrorPackageConfig struct { type ErrorPackageConfig struct {
ZeroLogErrTraces bool // autom print zerolog logs on .Build() (for SevErr and SevFatal) ZeroLogErrTraces bool // autom print zerolog logs on .Build() (for SevErr and SevFatal)
ZeroLogAllTraces bool // autom print zerolog logs on .Build() (for all Severities) ZeroLogAllTraces bool // autom print zerolog logs on .Build() (for all Severities)
RecursiveErrors bool // errors contains their Origin-Error RecursiveErrors bool // errors contains their Origin-Error
ExtendedGinOutput bool // Log extended data (trace, meta, ...) to gin in err.Output() ExtendedGinOutput bool // Log extended data (trace, meta, ...) to gin in err.Output()
ExtendGinOutput func(json map[string]any) // (Optionally) extend the gin output with more fields IncludeMetaInGinOutput bool // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output()
ExtendGinDataOutput func(json map[string]any) // (Optionally) extend the gin `__data` output with more fields ExtendGinOutput func(err *ExErr, json map[string]any) // (Optionally) extend the gin output with more fields
ExtendGinDataOutput func(err *ExErr, depth int, json map[string]any) // (Optionally) extend the gin `__data` output with more fields
} }
type ErrorPackageConfigInit struct { type ErrorPackageConfigInit struct {
ZeroLogErrTraces bool ZeroLogErrTraces *bool
ZeroLogAllTraces bool ZeroLogAllTraces *bool
RecursiveErrors bool RecursiveErrors *bool
ExtendedGinOutput bool ExtendedGinOutput *bool
ExtendGinOutput *func(json map[string]any) IncludeMetaInGinOutput *bool
ExtendGinDataOutput *func(json map[string]any) ExtendGinOutput func(err *ExErr, json map[string]any)
ExtendGinDataOutput func(err *ExErr, depth int, json map[string]any)
} }
var initialized = false var initialized = false
var pkgconfig = ErrorPackageConfig{ var pkgconfig = ErrorPackageConfig{
ZeroLogErrTraces: true, ZeroLogErrTraces: true,
ZeroLogAllTraces: false, ZeroLogAllTraces: false,
RecursiveErrors: true, RecursiveErrors: true,
ExtendedGinOutput: false, ExtendedGinOutput: false,
ExtendGinOutput: func(json map[string]any) {}, IncludeMetaInGinOutput: true,
ExtendGinDataOutput: func(json map[string]any) {}, ExtendGinOutput: func(err *ExErr, json map[string]any) {},
ExtendGinDataOutput: func(err *ExErr, depth int, json map[string]any) {},
} }
// Init initializes the exerr packages // Init initializes the exerr packages
@@ -42,13 +45,24 @@ func Init(cfg ErrorPackageConfigInit) {
panic("Cannot re-init error package") panic("Cannot re-init error package")
} }
ego := func(err *ExErr, json map[string]any) {}
egdo := func(err *ExErr, depth int, json map[string]any) {}
if cfg.ExtendGinOutput != nil {
ego = cfg.ExtendGinOutput
}
if cfg.ExtendGinDataOutput != nil {
egdo = cfg.ExtendGinDataOutput
}
pkgconfig = ErrorPackageConfig{ pkgconfig = ErrorPackageConfig{
ZeroLogErrTraces: cfg.ZeroLogErrTraces, ZeroLogErrTraces: langext.Coalesce(cfg.ZeroLogErrTraces, pkgconfig.ZeroLogErrTraces),
ZeroLogAllTraces: cfg.ZeroLogAllTraces, ZeroLogAllTraces: langext.Coalesce(cfg.ZeroLogAllTraces, pkgconfig.ZeroLogAllTraces),
RecursiveErrors: cfg.RecursiveErrors, RecursiveErrors: langext.Coalesce(cfg.RecursiveErrors, pkgconfig.RecursiveErrors),
ExtendedGinOutput: cfg.ExtendedGinOutput, ExtendedGinOutput: langext.Coalesce(cfg.ExtendedGinOutput, pkgconfig.ExtendedGinOutput),
ExtendGinOutput: langext.Coalesce(cfg.ExtendGinOutput, func(json map[string]any) {}), IncludeMetaInGinOutput: langext.Coalesce(cfg.IncludeMetaInGinOutput, pkgconfig.IncludeMetaInGinOutput),
ExtendGinDataOutput: langext.Coalesce(cfg.ExtendGinDataOutput, func(json map[string]any) {}), ExtendGinOutput: ego,
ExtendGinDataOutput: egdo,
} }
initialized = true initialized = true
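Example init call with the new pointer-based config (unset fields keep the package defaults via langext.Coalesce; the extra json field is hypothetical):
exerr.Init(exerr.ErrorPackageConfigInit{
	ZeroLogErrTraces:       langext.Ptr(true),
	IncludeMetaInGinOutput: langext.Ptr(false),
	ExtendGinOutput: func(err *exerr.ExErr, json map[string]any) {
		json["service"] = "my-api"
	},
})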

View File

@@ -4,6 +4,7 @@ import (
"github.com/rs/xid" "github.com/rs/xid"
"github.com/rs/zerolog" "github.com/rs/zerolog"
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"strings" "strings"
"time" "time"
) )
@@ -20,21 +21,61 @@ type ExErr struct {
Message string `json:"message"` Message string `json:"message"`
WrappedErrType string `json:"wrappedErrType"` WrappedErrType string `json:"wrappedErrType"`
WrappedErr any `json:"-"`
Caller string `json:"caller"` Caller string `json:"caller"`
OriginalError *ExErr OriginalError *ExErr `json:"originalError"`
Meta MetaMap `json:"meta"` Meta MetaMap `json:"meta"`
} }
func (ee *ExErr) Error() string { func (ee *ExErr) Error() string {
return ee.Message return ee.RecursiveMessage()
} }
// Unwrap must be implemented so that some error.XXX methods work
func (ee *ExErr) Unwrap() error { func (ee *ExErr) Unwrap() error {
if ee.OriginalError == nil {
return nil // this is necessary - otherwise we return a wrapped nil and the `x == nil` comparison fails (= panic in errors.Is and other failures)
}
return ee.OriginalError return ee.OriginalError
} }
// Is must be implemented so that error.Is(x) works
func (ee *ExErr) Is(e error) bool {
return IsFrom(ee, e)
}
// As must be implemented so that error.As(x) works
//
//goland:noinspection GoTypeAssertionOnErrors
func (ee *ExErr) As(target any) bool {
if dstErr, ok := target.(*ExErr); ok {
if dst0, ok := ee.contains(dstErr); ok {
dstErr = dst0
return true
} else {
return false
}
} else {
val := reflect.ValueOf(target)
typStr := val.Type().Elem().String()
for curr := ee; curr != nil; curr = curr.OriginalError {
if curr.Category == CatForeign && curr.WrappedErrType == typStr && curr.WrappedErr != nil {
val.Elem().Set(reflect.ValueOf(curr.WrappedErr))
return true
}
}
return false
}
}
func (ee *ExErr) Log(evt *zerolog.Event) { func (ee *ExErr) Log(evt *zerolog.Event) {
evt.Msg(ee.FormatLog(LogPrintFull)) evt.Msg(ee.FormatLog(LogPrintFull))
} }
@@ -123,7 +164,7 @@ func (ee *ExErr) FormatLog(lvl LogPrintLevel) string {
} }
func (ee *ExErr) ShortLog(evt *zerolog.Event) { func (ee *ExErr) ShortLog(evt *zerolog.Event) {
ee.Meta.Apply(evt).Msg(ee.FormatLog(LogPrintShort)) ee.Meta.Apply(evt, langext.Ptr(240)).Msg(ee.FormatLog(LogPrintShort))
} }
// RecursiveMessage returns the message to show // RecursiveMessage returns the message to show
@@ -162,7 +203,6 @@ func (ee *ExErr) RecursiveStatuscode() *int {
} }
} }
// fallback to <empty>
return nil return nil
} }
@@ -179,6 +219,19 @@ func (ee *ExErr) RecursiveCategory() ErrorCategory {
return ee.Category return ee.Category
} }
// RecursiveMeta searches (top-down) for the first error that has a meta value with the specified key
// and returns its value (or nil)
func (ee *ExErr) RecursiveMeta(key string) *MetaValue {
for curr := ee; curr != nil; curr = curr.OriginalError {
if metaval, ok := curr.Meta[key]; ok {
return langext.Ptr(metaval)
}
}
return nil
}
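Usage sketch (meta key is illustrative):
if mv := wrappedErr.RecursiveMeta("userId"); mv != nil {
	fmt.Println("offending user:", mv.ValueString())
}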
// Depth returns the depth of recursively contained errors
func (ee *ExErr) Depth() int { func (ee *ExErr) Depth() int {
if ee.OriginalError == nil { if ee.OriginalError == nil {
return 1 return 1
@@ -187,6 +240,59 @@ func (ee *ExErr) Depth() int {
} }
} }
// contains test if the supplied error is contained in this error (anywhere in the chain)
func (ee *ExErr) contains(original *ExErr) (*ExErr, bool) {
if original == nil {
return nil, false
}
if ee == original {
return ee, true
}
for curr := ee; curr != nil; curr = curr.OriginalError {
if curr.equalsDirectProperties(original) {
return curr, true
}
}
return nil, false
}
// equalsDirectProperties tests if ee and other are equals, but only looks at primary properties (not `OriginalError` or `Meta`)
func (ee *ExErr) equalsDirectProperties(other *ExErr) bool {
if ee.UniqueID != other.UniqueID {
return false
}
if ee.Timestamp != other.Timestamp {
return false
}
if ee.Category != other.Category {
return false
}
if ee.Severity != other.Severity {
return false
}
if ee.Type != other.Type {
return false
}
if ee.StatusCode != other.StatusCode {
return false
}
if ee.Message != other.Message {
return false
}
if ee.WrappedErrType != other.WrappedErrType {
return false
}
if ee.Caller != other.Caller {
return false
}
return true
}
func newID() string { func newID() string {
return xid.New().String() return xid.New().String()
} }

93
exerr/exerr_test.go Normal file
View File

@@ -0,0 +1,93 @@
package exerr
import (
"errors"
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"testing"
)
type golangErr struct {
Message string
}
func (g golangErr) Error() string {
return g.Message
}
type golangErr2 struct {
Message string
}
func (g golangErr2) Error() string {
return g.Message
}
type simpleError struct {
}
func (g simpleError) Error() string {
return "Something simple went wroong"
}
type simpleError2 struct {
}
func (g simpleError2) Error() string {
return "Something simple went wroong"
}
func TestExErrIs1(t *testing.T) {
e0 := simpleError{}
wrap := Wrap(e0, "something went wrong").Str("test", "123").Build()
tst.AssertTrue(t, errors.Is(wrap, simpleError{}))
tst.AssertFalse(t, errors.Is(wrap, golangErr{}))
tst.AssertFalse(t, errors.Is(wrap, golangErr{"error1"}))
}
func TestExErrIs2(t *testing.T) {
e0 := golangErr{"error1"}
wrap := Wrap(e0, "something went wrong").Str("test", "123").Build()
tst.AssertTrue(t, errors.Is(wrap, e0))
tst.AssertTrue(t, errors.Is(wrap, golangErr{"error1"}))
tst.AssertFalse(t, errors.Is(wrap, golangErr{"error2"}))
tst.AssertFalse(t, errors.Is(wrap, simpleError{}))
}
func TestExErrAs(t *testing.T) {
e0 := golangErr{"error1"}
w0 := Wrap(e0, "something went wrong").Str("test", "123").Build()
{
out := golangErr{}
ok := errors.As(w0, &out)
tst.AssertTrue(t, ok)
tst.AssertEqual(t, out.Message, "error1")
}
w1 := Wrap(w0, "outer error").Build()
{
out := golangErr{}
ok := errors.As(w1, &out)
tst.AssertTrue(t, ok)
tst.AssertEqual(t, out.Message, "error1")
}
{
out := golangErr2{}
ok := errors.As(w1, &out)
tst.AssertFalse(t, ok)
}
{
out := simpleError2{}
ok := errors.As(w1, &out)
tst.AssertFalse(t, ok)
}
}

View File

@@ -3,50 +3,91 @@ package exerr
import ( import (
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
json "gogs.mikescher.com/BlackForestBytes/goext/gojson" json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"net/http" "net/http"
"time" "time"
) )
func (ee *ExErr) toJson() gin.H { func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) langext.H {
json := gin.H{} ginJson := langext.H{}
if ee.UniqueID != "" { if ee.UniqueID != "" {
json["id"] = ee.UniqueID ginJson["id"] = ee.UniqueID
} }
if ee.Category != CatWrap { if ee.Category != CatWrap {
json["category"] = ee.Category ginJson["category"] = ee.Category
} }
if ee.Type != TypeWrap { if ee.Type != TypeWrap {
json["type"] = ee.Type ginJson["type"] = ee.Type
} }
if ee.StatusCode != nil { if ee.StatusCode != nil {
json["statuscode"] = ee.StatusCode ginJson["statuscode"] = ee.StatusCode
} }
if ee.Message != "" { if ee.Message != "" {
json["message"] = ee.Message ginJson["message"] = ee.Message
} }
if ee.Caller != "" { if ee.Caller != "" {
json["caller"] = ee.Caller ginJson["caller"] = ee.Caller
} }
if ee.Severity != SevErr { if ee.Severity != SevErr {
json["severity"] = ee.Severity ginJson["severity"] = ee.Severity
} }
if ee.Timestamp != (time.Time{}) { if ee.Timestamp != (time.Time{}) {
json["time"] = ee.Timestamp.Format(time.RFC3339) ginJson["time"] = ee.Timestamp.Format(time.RFC3339)
} }
if ee.WrappedErrType != "" { if ee.WrappedErrType != "" {
json["wrappedErrType"] = ee.WrappedErrType ginJson["wrappedErrType"] = ee.WrappedErrType
} }
if ee.OriginalError != nil { if ee.OriginalError != nil {
json["original"] = ee.OriginalError.toJson() ginJson["original"] = ee.OriginalError.toJson(depth+1, applyExtendListener, outputMeta)
} }
pkgconfig.ExtendGinDataOutput(json) if outputMeta {
metaJson := langext.H{}
for metaKey, metaVal := range ee.Meta {
metaJson[metaKey] = metaVal.rawValueForJson()
}
ginJson["meta"] = metaJson
}
return json if applyExtendListener {
pkgconfig.ExtendGinDataOutput(ee, depth, ginJson)
}
return ginJson
}
// ToAPIJson converts the ExError to a json object
// (the same object as used in the Output(gin) method)
//
// Parameters:
// - [applyExtendListener]: if false the pkgconfig.ExtendGinOutput / pkgconfig.ExtendGinDataOutput will not be applied
// - [includeWrappedErrors]: if false we do not include the recursive/wrapped errors in `__data`
// - [includeMetaFields]: if true we also include meta-values (aka from `.Str(key, value).Build()`), needs includeWrappedErrors=true
func (ee *ExErr) ToAPIJson(applyExtendListener bool, includeWrappedErrors bool, includeMetaFields bool) langext.H {
apiOutput := langext.H{
"errorid": ee.UniqueID,
"message": ee.RecursiveMessage(),
"errorcode": ee.RecursiveType().Key,
"category": ee.RecursiveCategory().Category,
}
if includeWrappedErrors {
apiOutput["__data"] = ee.toJson(0, applyExtendListener, includeMetaFields)
}
if applyExtendListener {
pkgconfig.ExtendGinOutput(ee, apiOutput)
}
return apiOutput
} }
func (ee *ExErr) Output(g *gin.Context) { func (ee *ExErr) Output(g *gin.Context) {
warnOnPkgConfigNotInitialized()
var statuscode = http.StatusInternalServerError var statuscode = http.StatusInternalServerError
var baseCat = ee.RecursiveCategory() var baseCat = ee.RecursiveCategory()
@@ -65,20 +106,7 @@ func (ee *ExErr) Output(g *gin.Context) {
statuscode = *baseType.DefaultStatusCode statuscode = *baseType.DefaultStatusCode
} }
warnOnPkgConfigNotInitialized() ginOutput := ee.ToAPIJson(true, pkgconfig.ExtendedGinOutput, pkgconfig.IncludeMetaInGinOutput)
ginOutput := gin.H{
"errorid": ee.UniqueID,
"message": ee.RecursiveMessage(),
"errorcode": ee.RecursiveType(),
"category": ee.RecursiveCategory(),
}
if pkgconfig.ExtendedGinOutput {
ginOutput["__data"] = ee.toJson()
}
pkgconfig.ExtendGinOutput(ginOutput)
g.Render(statuscode, json.GoJsonRender{Data: ginOutput, NilSafeSlices: true, NilSafeMaps: true}) g.Render(statuscode, json.GoJsonRender{Data: ginOutput, NilSafeSlices: true, NilSafeMaps: true})
} }
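Sketch: the same payload can now be produced outside a gin handler (error value and marshalling are placeholders):
body := exerr.FromError(err).ToAPIJson(false, true, true)
bin, _ := json.Marshal(body)
fmt.Println(string(bin))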

View File

@@ -24,6 +24,8 @@ func IsFrom(e error, original error) bool {
if e == nil { if e == nil {
return false return false
} }
//goland:noinspection GoDirectComparisonOfErrors
if e == original { if e == original {
return true return true
} }

View File

@@ -43,6 +43,7 @@ const (
MDTID metaDataType = "ID" MDTID metaDataType = "ID"
MDTAny metaDataType = "Interface" MDTAny metaDataType = "Interface"
MDTNil metaDataType = "Nil" MDTNil metaDataType = "Nil"
MDTEnum metaDataType = "Enum"
) )
type MetaValue struct { type MetaValue struct {
@@ -131,6 +132,8 @@ func (v MetaValue) SerializeValue() (string, error) {
return string(r), nil return string(r), nil
case MDTNil: case MDTNil:
return "", nil return "", nil
case MDTEnum:
return v.Value.(EnumWrap).Serialize(), nil
} }
return "", errors.New("Unknown type: " + string(v.DataType)) return "", errors.New("Unknown type: " + string(v.DataType))
} }
@@ -208,27 +211,41 @@ func (v MetaValue) ShortString(lim int) string {
return langext.StrLimit(string(r), lim, "...") return langext.StrLimit(string(r), lim, "...")
case MDTNil: case MDTNil:
return "<<null>>" return "<<null>>"
case MDTEnum:
return v.Value.(EnumWrap).String()
} }
return "(err)" return "(err)"
} }
func (v MetaValue) Apply(key string, evt *zerolog.Event) *zerolog.Event { func (v MetaValue) Apply(key string, evt *zerolog.Event, limitLen *int) *zerolog.Event {
switch v.DataType { switch v.DataType {
case MDTString: case MDTString:
return evt.Str(key, v.Value.(string)) if limitLen == nil {
return evt.Str(key, v.Value.(string))
} else {
return evt.Str(key, langext.StrLimit(v.Value.(string), *limitLen, "..."))
}
case MDTID: case MDTID:
return evt.Str(key, v.Value.(IDWrap).Value) return evt.Str(key, v.Value.(IDWrap).Value)
case MDTAny: case MDTAny:
if v.Value.(AnyWrap).IsError { if v.Value.(AnyWrap).IsError {
return evt.Str(key, "(err)") return evt.Str(key, "(err)")
} else { } else {
return evt.Str(key, v.Value.(AnyWrap).Json) if limitLen == nil {
return evt.Str(key, v.Value.(AnyWrap).Json)
} else {
return evt.Str(key, langext.StrLimit(v.Value.(AnyWrap).Json, *limitLen, "..."))
}
} }
case MDTStringPtr: case MDTStringPtr:
if langext.IsNil(v.Value) { if langext.IsNil(v.Value) {
return evt.Str(key, "<<null>>") return evt.Str(key, "<<null>>")
} }
return evt.Str(key, langext.CoalesceString(v.Value.(*string), "<<null>>")) if limitLen == nil {
return evt.Str(key, langext.CoalesceString(v.Value.(*string), "<<null>>"))
} else {
return evt.Str(key, langext.StrLimit(langext.CoalesceString(v.Value.(*string), "<<null>>"), *limitLen, "..."))
}
case MDTInt: case MDTInt:
return evt.Int(key, v.Value.(int)) return evt.Int(key, v.Value.(int))
case MDTInt8: case MDTInt8:
@@ -270,6 +287,14 @@ func (v MetaValue) Apply(key string, evt *zerolog.Event) *zerolog.Event {
return evt.Ints32(key, v.Value.([]int32)) return evt.Ints32(key, v.Value.([]int32))
case MDTNil: case MDTNil:
return evt.Str(key, "<<null>>") return evt.Str(key, "<<null>>")
case MDTEnum:
if v.Value.(EnumWrap).IsNil {
return evt.Any(key, nil)
} else if v.Value.(EnumWrap).ValueRaw != nil {
return evt.Any(key, v.Value.(EnumWrap).ValueRaw)
} else {
return evt.Str(key, v.Value.(EnumWrap).ValueString)
}
} }
return evt.Str(key, "(err)") return evt.Str(key, "(err)")
} }
@@ -511,6 +536,10 @@ func (v *MetaValue) Deserialize(value string, datatype metaDataType) error {
v.Value = nil v.Value = nil
v.DataType = datatype v.DataType = datatype
return nil return nil
case MDTEnum:
v.Value = deserializeEnumWrap(value)
v.DataType = datatype
return nil
} }
return errors.New("Unknown type: " + string(datatype)) return errors.New("Unknown type: " + string(datatype))
} }
@@ -581,10 +610,66 @@ func (v MetaValue) ValueString() string {
return string(r) return string(r)
case MDTNil: case MDTNil:
return "<<null>>" return "<<null>>"
case MDTEnum:
return v.Value.(EnumWrap).String()
} }
return "(err)" return "(err)"
} }
// rawValueForJson returns the `Value` field most of the time,
// but for some datatypes we do special processing,
// so that we can plug the output value into json.Marshal without any surprises
func (v MetaValue) rawValueForJson() any {
if v.DataType == MDTAny {
if v.Value.(AnyWrap).IsNil {
return nil
}
if v.Value.(AnyWrap).IsError {
return bson.M{"@error": true}
}
jsonobj := primitive.M{}
jsonarr := primitive.A{}
if err := json.Unmarshal([]byte(v.Value.(AnyWrap).Json), &jsonobj); err == nil {
return jsonobj
} else if err := json.Unmarshal([]byte(v.Value.(AnyWrap).Json), &jsonarr); err == nil {
return jsonarr
} else {
return bson.M{"type": v.Value.(AnyWrap).Type, "data": v.Value.(AnyWrap).Json}
}
}
if v.DataType == MDTID {
if v.Value.(IDWrap).IsNil {
return nil
}
return v.Value.(IDWrap).Value
}
if v.DataType == MDTBytes {
return hex.EncodeToString(v.Value.([]byte))
}
if v.DataType == MDTDuration {
return v.Value.(time.Duration).String()
}
if v.DataType == MDTTime {
return v.Value.(time.Time).Format(time.RFC3339Nano)
}
if v.DataType == MDTObjectID {
return v.Value.(primitive.ObjectID).Hex()
}
if v.DataType == MDTNil {
return nil
}
if v.DataType == MDTEnum {
if v.Value.(EnumWrap).IsNil {
return nil
}
if v.Value.(EnumWrap).ValueRaw != nil {
return v.Value.(EnumWrap).ValueRaw
}
return v.Value.(EnumWrap).ValueString
}
return v.Value
}
func (mm MetaMap) FormatOneLine(singleMaxLen int) string { func (mm MetaMap) FormatOneLine(singleMaxLen int) string {
r := "" r := ""
@@ -629,9 +714,9 @@ func (mm MetaMap) Any() bool {
return len(mm) > 0 return len(mm) > 0
} }
func (mm MetaMap) Apply(evt *zerolog.Event) *zerolog.Event { func (mm MetaMap) Apply(evt *zerolog.Event, limitLen *int) *zerolog.Event {
for key, val := range mm { for key, val := range mm {
evt = val.Apply(key, evt) evt = val.Apply(key, evt, limitLen)
} }
return evt return evt
} }

View File

@@ -4,6 +4,7 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"gogs.mikescher.com/BlackForestBytes/goext/enums"
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
"strings" "strings"
) )
@@ -131,3 +132,58 @@ func deserializeAnyWrap(v string) AnyWrap {
} }
} }
} }
type EnumWrap struct {
Type string
ValueString string
ValueRaw enums.Enum // `ValueRaw` is lost during serialization roundtrip
IsNil bool
}
func newEnumWrap(val enums.Enum) EnumWrap {
t := fmt.Sprintf("%T", val)
arr := strings.Split(t, ".")
if len(arr) > 0 {
t = arr[len(arr)-1]
}
if langext.IsNil(val) {
return EnumWrap{Type: t, ValueString: "", ValueRaw: val, IsNil: true}
}
if enumstr, ok := val.(enums.StringEnum); ok {
return EnumWrap{Type: t, ValueString: enumstr.String(), ValueRaw: val, IsNil: false}
}
return EnumWrap{Type: t, ValueString: fmt.Sprintf("%v", val), ValueRaw: val, IsNil: false}
}
func (w EnumWrap) Serialize() string {
if w.IsNil {
return "!nil" + ":" + w.Type
}
return w.Type + ":" + w.ValueString
}
func (w EnumWrap) String() string {
if w.IsNil {
return w.Type + "<<nil>>"
}
return "[" + w.Type + "] " + w.ValueString
}
func deserializeEnumWrap(v string) EnumWrap {
r := strings.SplitN(v, ":", 2)
if len(r) == 2 && r[0] == "!nil" {
return EnumWrap{Type: r[1], ValueString: v, ValueRaw: nil, IsNil: true}
}
if len(r) == 0 {
return EnumWrap{}
} else if len(r) == 1 {
return EnumWrap{Type: "", ValueString: v, ValueRaw: nil, IsNil: false}
} else {
return EnumWrap{Type: r[0], ValueString: r[1], ValueRaw: nil, IsNil: false}
}
}

36
fsext/exists.go Normal file
View File

@@ -0,0 +1,36 @@
package fsext
import "os"
func PathExists(fp string) (bool, error) {
_, err := os.Stat(fp)
if err == nil {
return true, nil
}
if os.IsNotExist(err) {
return false, nil
}
return false, err
}
func FileExists(fp string) (bool, error) {
stat, err := os.Stat(fp)
if err == nil {
return !stat.IsDir(), nil
}
if os.IsNotExist(err) {
return false, nil
}
return false, err
}
func DirectoryExists(fp string) (bool, error) {
stat, err := os.Stat(fp)
if err == nil {
return stat.IsDir(), nil
}
if os.IsNotExist(err) {
return false, nil
}
return false, err
}
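Usage sketch (path is illustrative):
if ok, err := fsext.DirectoryExists("/var/data"); err != nil {
	return err
} else if !ok {
	if err := os.MkdirAll("/var/data", 0o755); err != nil {
		return err
	}
}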

View File

@@ -14,6 +14,9 @@ type AppContext struct {
} }
func CreateAppContext(g *gin.Context, innerCtx context.Context, cancelFn context.CancelFunc) *AppContext { func CreateAppContext(g *gin.Context, innerCtx context.Context, cancelFn context.CancelFunc) *AppContext {
for key, value := range g.Keys {
innerCtx = context.WithValue(innerCtx, key, value)
}
return &AppContext{ return &AppContext{
inner: innerCtx, inner: innerCtx,
cancelFunc: cancelFn, cancelFunc: cancelFn,
@@ -38,6 +41,10 @@ func (ac *AppContext) Value(key any) any {
return ac.inner.Value(key) return ac.inner.Value(key)
} }
func (ac *AppContext) Set(key, value any) {
ac.inner = context.WithValue(ac.inner, key, value)
}
func (ac *AppContext) Cancel() { func (ac *AppContext) Cancel() {
ac.cancelled = true ac.cancelled = true
ac.cancelFunc() ac.cancelFunc()
@@ -50,10 +57,3 @@ func (ac *AppContext) RequestURI() string {
return "" return ""
} }
} }
func (ac *AppContext) FinishSuccess(res HTTPResponse) HTTPResponse {
if ac.cancelled {
panic("Cannot finish a cancelled request")
}
return res
}

View File

@@ -0,0 +1,12 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
)
func BodyBuffer(g *gin.Context) {
if g.Request.Body != nil {
g.Request.Body = dataext.NewBufferedReadCloser(g.Request.Body)
}
}

View File

@@ -1,8 +1,14 @@
package ginext package ginext
import ( import (
"fmt"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/rs/zerolog/log"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/mathext"
"net"
"net/http" "net/http"
"strings"
"time" "time"
) )
@@ -12,10 +18,26 @@ type GinWrapper struct {
allowCors bool allowCors bool
ginDebug bool ginDebug bool
bufferBody bool
requestTimeout time.Duration requestTimeout time.Duration
routeSpecs []ginRouteSpec
} }
func NewEngine(allowCors bool, ginDebug bool, timeout time.Duration) *GinWrapper { type ginRouteSpec struct {
Method string
URL string
Middlewares []string
Handler string
}
// NewEngine creates a new (wrapped) ginEngine
// Parameters are:
// - [allowCors] Add cors handler to allow all CORS requests on the default http methods
// - [ginDebug] Set gin.debug to true (adds more logs)
// - [bufferBody] Buffers the input body stream, this way the ginext error handler can later include the whole request body
// - [timeout] The default handler timeout
func NewEngine(allowCors bool, ginDebug bool, bufferBody bool, timeout time.Duration) *GinWrapper {
engine := gin.New() engine := gin.New()
wrapper := &GinWrapper{ wrapper := &GinWrapper{
@@ -23,6 +45,7 @@ func NewEngine(allowCors bool, ginDebug bool, timeout time.Duration) *GinWrapper
SuppressGinLogs: false, SuppressGinLogs: false,
allowCors: allowCors, allowCors: allowCors,
ginDebug: ginDebug, ginDebug: ginDebug,
bufferBody: bufferBody,
requestTimeout: timeout, requestTimeout: timeout,
} }
@@ -33,18 +56,94 @@ func NewEngine(allowCors bool, ginDebug bool, timeout time.Duration) *GinWrapper
engine.Use(CorsMiddleware()) engine.Use(CorsMiddleware())
} }
if ginDebug { // do not debug-print routes
gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {}
if !ginDebug {
gin.SetMode(gin.ReleaseMode)
ginlogger := gin.Logger() ginlogger := gin.Logger()
engine.Use(func(context *gin.Context) { engine.Use(func(context *gin.Context) {
if !wrapper.SuppressGinLogs { if !wrapper.SuppressGinLogs {
ginlogger(context) ginlogger(context)
} }
}) })
} else {
gin.SetMode(gin.DebugMode)
} }
return wrapper return wrapper
} }
func (w *GinWrapper) ServeHTTP(writer http.ResponseWriter, request *http.Request) { func (w *GinWrapper) ListenAndServeHTTP(addr string, postInit func(port string)) (chan error, *http.Server) {
w.engine.ServeHTTP(writer, request)
w.DebugPrintRoutes()
httpserver := &http.Server{
Addr: addr,
Handler: w.engine,
}
errChan := make(chan error)
go func() {
ln, err := net.Listen("tcp", httpserver.Addr)
if err != nil {
errChan <- err
return
}
_, port, err := net.SplitHostPort(ln.Addr().String())
if err != nil {
errChan <- err
return
}
log.Info().Str("address", httpserver.Addr).Msg("HTTP-Server started on http://localhost:" + port)
if postInit != nil {
postInit(port) // the net.Listener a few lines above is at this point actually already buffering requests
}
errChan <- httpserver.Serve(ln)
}()
return errChan, httpserver
}
func (w *GinWrapper) DebugPrintRoutes() {
if !w.ginDebug {
return
}
lines := make([][4]string, 0)
pad := [4]int{0, 0, 0, 0}
for _, spec := range w.routeSpecs {
line := [4]string{
spec.Method,
spec.URL,
strings.Join(spec.Middlewares, " -> "),
spec.Handler,
}
lines = append(lines, line)
pad[0] = mathext.Max(pad[0], len(line[0]))
pad[1] = mathext.Max(pad[1], len(line[1]))
pad[2] = mathext.Max(pad[2], len(line[2]))
pad[3] = mathext.Max(pad[3], len(line[3]))
}
for _, line := range lines {
fmt.Printf("Gin-Route: %s %s --> %s --> %s\n",
langext.StrPadRight("["+line[0]+"]", " ", pad[0]+2),
langext.StrPadRight(line[1], " ", pad[1]),
langext.StrPadRight(line[2], " ", pad[2]),
langext.StrPadRight(line[3], " ", pad[3]))
}
} }
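Hedged startup sketch for the new ListenAndServeHTTP (route registration is omitted; address and log messages are illustrative):
wrapper := ginext.NewEngine(true, false, true, 30*time.Second)
// ... register routes on wrapper ...
errChan, srv := wrapper.ListenAndServeHTTP("0.0.0.0:8080", func(port string) {
	log.Info().Msg("api reachable on port " + port)
})
defer func() { _ = srv.Close() }()
if err := <-errChan; err != nil && !errors.Is(err, http.ErrServerClosed) {
	log.Fatal().Err(err).Msg("http server failed")
}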

View File

@@ -25,7 +25,7 @@ func Wrap(w *GinWrapper, fn WHandlerFunc) gin.HandlerFunc {
Str("trace", stackTrace). Str("trace", stackTrace).
Build() Build()
wrap = APIError(g, err) wrap = Error(err)
} }
if g.Writer.Written() { if g.Writer.Written() {

View File

@@ -1,11 +1,17 @@
package ginext package ginext
import ( import (
"bytes"
"context" "context"
"fmt"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/gin-gonic/gin/binding" "github.com/gin-gonic/gin/binding"
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
"io"
"runtime/debug" "runtime/debug"
"time"
) )
type PreContext struct { type PreContext struct {
@@ -14,7 +20,10 @@ type PreContext struct {
uri any uri any
query any query any
body any body any
rawbody *[]byte
form any form any
header any
timeout *time.Duration
} }
func (pctx *PreContext) URI(uri any) *PreContext { func (pctx *PreContext) URI(uri any) *PreContext {
@@ -32,45 +41,117 @@ func (pctx *PreContext) Body(body any) *PreContext {
return pctx return pctx
} }
func (pctx *PreContext) RawBody(rawbody *[]byte) *PreContext {
pctx.rawbody = rawbody
return pctx
}
func (pctx *PreContext) Form(form any) *PreContext { func (pctx *PreContext) Form(form any) *PreContext {
pctx.form = form pctx.form = form
return pctx return pctx
} }
func (pctx *PreContext) Header(header any) *PreContext {
pctx.header = header
return pctx
}
func (pctx *PreContext) WithTimeout(to time.Duration) *PreContext {
pctx.timeout = &to
return pctx
}
func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
if pctx.uri != nil { if pctx.uri != nil {
if err := pctx.ginCtx.ShouldBindUri(pctx.uri); err != nil { if err := pctx.ginCtx.ShouldBindUri(pctx.uri); err != nil {
return nil, nil, langext.Ptr(APIError(pctx.ginCtx, commonApiErr.BindFailURI, "Failed to read uri", err)) err = exerr.Wrap(err, "Failed to read uri").
WithType(exerr.TypeBindFailURI).
Str("struct_type", fmt.Sprintf("%T", pctx.uri)).
Build()
return nil, nil, langext.Ptr(Error(err))
} }
} }
if pctx.query != nil { if pctx.query != nil {
if err := pctx.ginCtx.ShouldBindQuery(pctx.query); err != nil { if err := pctx.ginCtx.ShouldBindQuery(pctx.query); err != nil {
return nil, nil, langext.Ptr(APIError(pctx.ginCtx, commonApiErr.BindFailQuery, "Failed to read query", err)) err = exerr.Wrap(err, "Failed to read query").
WithType(exerr.TypeBindFailQuery).
Str("struct_type", fmt.Sprintf("%T", pctx.query)).
Build()
return nil, nil, langext.Ptr(Error(err))
} }
} }
if pctx.body != nil { if pctx.body != nil {
if pctx.ginCtx.ContentType() == "application/json" { if pctx.ginCtx.ContentType() == "application/json" {
if err := pctx.ginCtx.ShouldBindJSON(pctx.body); err != nil { if err := pctx.ginCtx.ShouldBindJSON(pctx.body); err != nil {
return nil, nil, langext.Ptr(APIError(pctx.ginCtx, commonApiErr.BindFailJSON, "Failed to read body", err)) err = exerr.Wrap(err, "Failed to read json-body").
WithType(exerr.TypeBindFailJSON).
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
Build()
return nil, nil, langext.Ptr(Error(err))
} }
} else { } else {
return nil, nil, langext.Ptr(APIError(pctx.ginCtx, commonApiErr.BindFailJSON, "missing JSON body", nil)) err := exerr.New(exerr.TypeBindFailJSON, "missing JSON body").
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
Build()
return nil, nil, langext.Ptr(Error(err))
}
}
if pctx.rawbody != nil {
if brc, ok := pctx.ginCtx.Request.Body.(dataext.BufferedReadCloser); ok {
v, err := brc.BufferedAll()
if err != nil {
return nil, nil, langext.Ptr(Error(err))
}
*pctx.rawbody = v
} else {
buf := &bytes.Buffer{}
_, err := io.Copy(buf, pctx.ginCtx.Request.Body)
if err != nil {
return nil, nil, langext.Ptr(Error(err))
}
*pctx.rawbody = buf.Bytes()
} }
} }
if pctx.form != nil { if pctx.form != nil {
if pctx.ginCtx.ContentType() == "multipart/form-data" { if pctx.ginCtx.ContentType() == "multipart/form-data" {
if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil { if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil {
return nil, nil, langext.Ptr(APIError(pctx.ginCtx, commonApiErr.BindFailFormData, "Failed to read multipart-form", err)) err = exerr.Wrap(err, "Failed to read multipart-form").
WithType(exerr.TypeBindFailFormData).
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(Error(err))
}
} else if pctx.ginCtx.ContentType() == "application/x-www-form-urlencoded" {
if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil {
err = exerr.Wrap(err, "Failed to read urlencoded-form").
WithType(exerr.TypeBindFailFormData).
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(Error(err))
} }
} else { } else {
return nil, nil, langext.Ptr(APIError(pctx.ginCtx, commonApiErr.BindFailJSON, "missing form body", nil)) err := exerr.New(exerr.TypeBindFailFormData, "missing form body").
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(Error(err))
} }
} }
ictx, cancel := context.WithTimeout(context.Background(), pctx.wrapper.requestTimeout) if pctx.header != nil {
if err := pctx.ginCtx.ShouldBindHeader(pctx.header); err != nil {
err = exerr.Wrap(err, "Failed to read header").
WithType(exerr.TypeBindFailHeader).
Str("struct_type", fmt.Sprintf("%T", pctx.query)).
Build()
return nil, nil, langext.Ptr(Error(err))
}
}
ictx, cancel := context.WithTimeout(context.Background(), langext.Coalesce(pctx.timeout, pctx.wrapper.requestTimeout))
actx := CreateAppContext(pctx.ginCtx, ictx, cancel) actx := CreateAppContext(pctx.ginCtx, ictx, cancel)
return actx, pctx.ginCtx, nil return actx, pctx.ginCtx, nil
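Hedged handler sketch using the extended PreContext (the route-registration / WHandlerFunc wiring is not shown in this diff, so the surrounding scaffolding is assumed):
func getUser(pctx ginext.PreContext) ginext.HTTPResponse {
	var uri struct {
		UserID string `uri:"uid"`
	}
	var header struct {
		Token string `header:"Authorization"`
	}
	ctx, g, errResp := pctx.URI(&uri).Header(&header).WithTimeout(10 * time.Second).Start()
	if errResp != nil {
		return *errResp
	}
	defer ctx.Cancel()
	_ = g // raw *gin.Context, if direct access is needed
	return ginext.Status(http.StatusNoContent)
}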

View File

@@ -7,50 +7,97 @@ import (
json "gogs.mikescher.com/BlackForestBytes/goext/gojson" json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
) )
type headerval struct {
Key string
Val string
}
type HTTPResponse interface { type HTTPResponse interface {
Write(g *gin.Context) Write(g *gin.Context)
WithHeader(k string, v string) HTTPResponse
} }
type jsonHTTPResponse struct { type jsonHTTPResponse struct {
statusCode int statusCode int
data any data any
headers []headerval
} }
func (j jsonHTTPResponse) Write(g *gin.Context) { func (j jsonHTTPResponse) Write(g *gin.Context) {
g.Render(j.statusCode, json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true}) for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
var f *string
if jsonfilter := g.GetString("goext.jsonfilter"); jsonfilter != "" {
f = &jsonfilter
}
g.Render(j.statusCode, json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true, Filter: f})
}
func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
type emptyHTTPResponse struct {
statusCode int
headers []headerval
}
func (j emptyHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
g.Status(j.statusCode)
}
func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
type textHTTPResponse struct {
statusCode int
data string
headers []headerval
}
func (j textHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
g.String(j.statusCode, "%s", j.data)
}
func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
type dataHTTPResponse struct {
statusCode int
data []byte
contentType string
headers []headerval
}
func (j dataHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
g.Data(j.statusCode, j.contentType, j.data)
}
func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
type fileHTTPResponse struct {
mimetype string
filepath string
filename *string
headers []headerval
}
func (j fileHTTPResponse) Write(g *gin.Context) {
@@ -59,26 +106,71 @@ func (j fileHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
g.File(j.filepath)
}
func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
type downloadDataHTTPResponse struct {
statusCode int
mimetype string
data []byte
filename *string
headers []headerval
}
func (j downloadDataHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
if j.filename != nil {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
g.Data(j.statusCode, j.mimetype, j.data)
}
func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
type redirectHTTPResponse struct {
statusCode int
url string
headers []headerval
}
func (j redirectHTTPResponse) Write(g *gin.Context) {
g.Redirect(j.statusCode, j.url)
}
func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
type jsonAPIErrResponse struct {
err *exerr.ExErr
headers []headerval
}
func (j jsonAPIErrResponse) Write(g *gin.Context) {
j.err.Output(g)
}
func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func Status(sc int) HTTPResponse {
return &emptyHTTPResponse{statusCode: sc}
}
@@ -103,16 +195,26 @@ func Download(mimetype string, filepath string, filename string) HTTPResponse {
return &fileHTTPResponse{mimetype: mimetype, filepath: filepath, filename: &filename}
}
func DownloadData(status int, mimetype string, filename string, data []byte) HTTPResponse {
return &downloadDataHTTPResponse{statusCode: status, mimetype: mimetype, data: data, filename: &filename}
}
func Redirect(sc int, newURL string) HTTPResponse {
return &redirectHTTPResponse{statusCode: sc, url: newURL}
}
func Error(e error) HTTPResponse {
return &jsonAPIErrResponse{
err: exerr.FromError(e),
}
}
func ErrWrap(e error, errorType exerr.ErrorType, msg string) HTTPResponse {
return &jsonAPIErrResponse{
err: exerr.FromError(exerr.Wrap(e, msg).WithType(errorType).Build()),
}
}
func NotImplemented() HTTPResponse {
return Error(exerr.New(exerr.TypeNotImplemented, "").Build())
}
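Every response constructor above now returns an HTTPResponse whose headers can be chained via WithHeader, and Error/ErrWrap replace the old APIError helper that still needed the gin context. A hedged usage sketch — the import paths follow the goext module layout shown elsewhere in this diff, while the surrounding handler shape and the exerr.TypeInternal constant are assumptions:

```go
package example

import (
	"net/http"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

// buildReportResponse shows how the new helpers compose; it is illustrative
// only and not part of goext itself.
func buildReportResponse(csv []byte, err error) ginext.HTTPResponse {
	if err != nil {
		// wrap with an explicit error type and message (error-type constant assumed)
		return ginext.ErrWrap(err, exerr.TypeInternal, "failed to build report")
	}
	// DownloadData sets Content-Type and Content-Disposition itself;
	// further headers can be chained onto any HTTPResponse.
	return ginext.DownloadData(http.StatusOK, "text/csv", "report.csv", csv).
		WithHeader("Cache-Control", "no-store")
}
```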


@@ -2,7 +2,14 @@ package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"net/http" "net/http"
"path"
"reflect"
"regexp"
"runtime"
"strings"
)
var anyMethods = []string{
@@ -12,60 +19,97 @@ var anyMethods = []string{
}
type GinRoutesWrapper struct {
wrapper *GinWrapper
routes gin.IRouter
absPath string
defaultHandler []gin.HandlerFunc
}
type GinRouteBuilder struct {
routes *GinRoutesWrapper
method string
relPath string
absPath string
handlers []gin.HandlerFunc
}
func (w *GinWrapper) Routes() *GinRoutesWrapper {
return &GinRoutesWrapper{
wrapper: w,
routes: w.engine,
absPath: "",
defaultHandler: make([]gin.HandlerFunc, 0),
}
}
func (w *GinRoutesWrapper) Group(relativePath string) *GinRoutesWrapper {
return &GinRoutesWrapper{
wrapper: w.wrapper,
routes: w.routes.Group(relativePath),
defaultHandler: langext.ArrCopy(w.defaultHandler),
absPath: joinPaths(w.absPath, relativePath),
}
}
func (w *GinRoutesWrapper) Use(middleware ...gin.HandlerFunc) *GinRoutesWrapper {
defHandler := langext.ArrCopy(w.defaultHandler)
defHandler = append(defHandler, middleware...)
return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler}
}
func (w *GinRoutesWrapper) WithJSONFilter(filter string) *GinRoutesWrapper {
defHandler := langext.ArrCopy(w.defaultHandler)
defHandler = append(defHandler, func(g *gin.Context) {
g.Set("goext.jsonfilter", filter)
})
return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler}
}
func (w *GinRoutesWrapper) GET(relativePath string) *GinRouteBuilder {
return w._route(http.MethodGet, relativePath)
}
func (w *GinRoutesWrapper) POST(relativePath string) *GinRouteBuilder {
return w._route(http.MethodPost, relativePath)
}
func (w *GinRoutesWrapper) DELETE(relativePath string) *GinRouteBuilder {
return w._route(http.MethodDelete, relativePath)
}
func (w *GinRoutesWrapper) PATCH(relativePath string) *GinRouteBuilder {
return w._route(http.MethodPatch, relativePath)
}
func (w *GinRoutesWrapper) PUT(relativePath string) *GinRouteBuilder {
return w._route(http.MethodPut, relativePath)
}
func (w *GinRoutesWrapper) OPTIONS(relativePath string) *GinRouteBuilder {
return w._route(http.MethodOptions, relativePath)
}
func (w *GinRoutesWrapper) HEAD(relativePath string) *GinRouteBuilder {
return w._route(http.MethodHead, relativePath)
}
func (w *GinRoutesWrapper) COUNT(relativePath string) *GinRouteBuilder {
return w._route("COUNT", relativePath)
}
func (w *GinRoutesWrapper) Any(relativePath string) *GinRouteBuilder {
return w._route("*", relativePath)
}
func (w *GinRoutesWrapper) _route(method string, relativePath string) *GinRouteBuilder {
return &GinRouteBuilder{
routes: w,
method: method,
relPath: relativePath,
absPath: joinPaths(w.absPath, relativePath),
handlers: langext.ArrCopy(w.defaultHandler),
}
}
func (w *GinRouteBuilder) Use(middleware ...gin.HandlerFunc) *GinRouteBuilder {
@@ -73,13 +117,110 @@ func (w *GinRouteBuilder) Use(middleware ...gin.HandlerFunc) *GinRouteBuilder {
return w
}
func (w *GinRouteBuilder) WithJSONFilter(filter string) *GinRouteBuilder {
w.handlers = append(w.handlers, func(g *gin.Context) {
g.Set("goext.jsonfilter", filter)
})
return w
}
func (w *GinRouteBuilder) Handle(handler WHandlerFunc) {
if w.routes.wrapper.bufferBody {
arr := make([]gin.HandlerFunc, 0, len(w.handlers)+1)
arr = append(arr, BodyBuffer)
arr = append(arr, w.handlers...)
w.handlers = arr
}
middlewareNames := langext.ArrMap(w.handlers, func(v gin.HandlerFunc) string { return nameOfFunction(v) })
handlerName := nameOfFunction(handler)
w.handlers = append(w.handlers, Wrap(w.routes.wrapper, handler))
methodName := w.method
if w.method == "*" {
methodName = "ANY"
for _, method := range anyMethods {
w.routes.routes.Handle(method, w.relPath, w.handlers...)
}
} else {
w.routes.routes.Handle(w.method, w.relPath, w.handlers...)
}
w.routes.wrapper.routeSpecs = append(w.routes.wrapper.routeSpecs, ginRouteSpec{
Method: methodName,
URL: w.absPath,
Middlewares: middlewareNames,
Handler: handlerName,
})
}
func (w *GinWrapper) NoRoute(handler WHandlerFunc) {
handlers := make([]gin.HandlerFunc, 0)
if w.bufferBody {
handlers = append(handlers, BodyBuffer)
}
middlewareNames := langext.ArrMap(handlers, func(v gin.HandlerFunc) string { return nameOfFunction(v) })
handlerName := nameOfFunction(handler)
handlers = append(handlers, Wrap(w, handler))
w.engine.NoRoute(handlers...)
w.routeSpecs = append(w.routeSpecs, ginRouteSpec{
Method: "ANY",
URL: "[NO_ROUTE]",
Middlewares: middlewareNames,
Handler: handlerName,
})
}
func nameOfFunction(f any) string {
fname := runtime.FuncForPC(reflect.ValueOf(f).Pointer()).Name()
split := strings.Split(fname, "/")
if len(split) == 0 {
return ""
}
fname = split[len(split)-1]
// https://stackoverflow.com/a/32925345/1761622
if strings.HasSuffix(fname, "-fm") {
fname = fname[:len(fname)-len("-fm")]
}
suffix := rext.W(regexp.MustCompile(`\.func[0-9]+(?:\.[0-9]+)*$`))
if match, ok := suffix.MatchFirst(fname); ok {
fname = fname[:len(fname)-match.FullMatch().Length()]
}
return fname
}
// joinPaths is copied verbatim from gin@v1.9.1/gin.go
func joinPaths(absolutePath, relativePath string) string {
if relativePath == "" {
return absolutePath
}
finalPath := path.Join(absolutePath, relativePath)
if lastChar(relativePath) == '/' && lastChar(finalPath) != '/' {
return finalPath + "/"
}
return finalPath
}
func lastChar(str string) uint8 {
if str == "" {
panic("The length of the string can't be 0")
}
return str[len(str)-1]
}
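The rebuilt route builder keeps the wrapper fluent: groups carry their default middleware and absolute path forward, WithJSONFilter stores a filter string in the gin context for the JSON renderer, and Handle registers the route while recording a ginRouteSpec (method, URL, middleware and handler names). A sketch of typical wiring — the handler variables and middleware are placeholders, and the underlying WHandlerFunc signature is left to ginext:

```go
package example

import (
	"github.com/gin-gonic/gin"

	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

// getUser / getUserFull are assumed to be assigned elsewhere with whatever
// signature ginext.WHandlerFunc actually has.
var getUser ginext.WHandlerFunc
var getUserFull ginext.WHandlerFunc

// authMiddleware is an ordinary gin middleware (placeholder).
func authMiddleware(g *gin.Context) { g.Next() }

func registerRoutes(w *ginext.GinWrapper) {
	api := w.Routes().Group("/api").Group("/v1").Use(authMiddleware)

	api.GET("/users/:id").Handle(getUser)

	// responses from this route only marshal struct fields whose
	// `jsonfilter` tag contains "ADMIN" (untagged fields are always emitted)
	api.GET("/users/:id/full").WithJSONFilter("ADMIN").Handle(getUserFull)

	// Handle also records a ginRouteSpec (method, URL, middlewares, handler name)
	// on the wrapper, usable for route listings and diagnostics.
}
```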

go.mod (35 changed lines)

@@ -6,45 +6,44 @@ require (
github.com/gin-gonic/gin v1.9.1
github.com/jmoiron/sqlx v1.3.5
github.com/rs/xid v1.5.0
github.com/rs/zerolog v1.29.1 → v1.31.0
go.mongodb.org/mongo-driver v1.12.0 → v1.13.0
golang.org/x/crypto v0.11.0 → v0.15.0
golang.org/x/sys v0.10.0 → v0.14.0
golang.org/x/term v0.10.0 → v0.14.0
)
require (
github.com/bytedance/sonic v1.10.0-rc2 → v1.10.2 // indirect
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
github.com/chenzhuoyu/iasm v0.9.0 → v0.9.1 // indirect
github.com/gabriel-vasile/mimetype v1.4.2 → v1.4.3 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.14.1 → v10.16.0 // indirect
github.com/goccy/go-json v0.10.2 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.16.7 → v1.17.2 // indirect
github.com/klauspost/cpuid/v2 v2.2.5 → v2.2.6 // indirect
github.com/leodido/go-urn v1.2.4 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.19 → v0.0.20 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/montanaflynn/stats v0.7.1 // indirect
github.com/pelletier/go-toml/v2 v2.0.9 → v2.1.0 // indirect
github.com/pkg/errors v0.9.1 // indirect (removed)
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.11 // indirect
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect
golang.org/x/arch v0.4.0 → v0.6.0 // indirect
golang.org/x/net v0.12.0 → v0.18.0 // indirect
golang.org/x/sync v0.3.0 → v0.5.0 // indirect
golang.org/x/text v0.11.0 → v0.14.0 // indirect
google.golang.org/protobuf v1.31.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

go.sum (137 changed lines)

@@ -1,197 +1,198 @@
[go.sum: checksum entries regenerated to match the dependency changes in go.mod above — the h1:/go.mod hashes for the bumped modules (sonic, iasm, mimetype, validator, compress, cpuid, go-isatty, go-toml, zerolog, mongo-driver and the golang.org/x/* packages) were replaced with those of the new versions, and the github.com/pkg/errors h1: entry was dropped.]


@@ -1,5 +1,5 @@
package goext
const GoextVersion = "0.0.191" → "0.0.305"
const GoextVersionTimestamp = "2023-07-24T11:18:25+0200" → "2023-11-09T09:35:56+0100"


@@ -156,7 +156,6 @@ import (
// an error.
func Marshal(v any) ([]byte, error) {
e := newEncodeState()
err := e.marshal(v, encOpts{escapeHTML: true})
if err != nil {
@@ -164,6 +163,8 @@ func Marshal(v any) ([]byte, error) {
}
buf := append([]byte(nil), e.Bytes()...)
encodeStatePool.Put(e)
return buf, nil
}
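Note the ordering in the rewritten Marshal: the deferred encodeStatePool.Put(e) is gone, and the encoder state is now returned to the pool explicitly, only on the success path and only after its internal buffer has been copied into a fresh slice. A generic sketch of that copy-then-recycle pattern (illustrative; not the goext encoder itself):

```go
package example

import (
	"bytes"
	"sync"
)

var bufPool = sync.Pool{New: func() any { return new(bytes.Buffer) }}

// render copies the pooled buffer's bytes *before* handing the buffer back,
// so the returned slice cannot be overwritten by a later reuse of the buffer.
func render(s string) []byte {
	b := bufPool.Get().(*bytes.Buffer)
	b.Reset()
	b.WriteString(s)

	out := append([]byte(nil), b.Bytes()...) // copy first ...
	bufPool.Put(b)                           // ... then recycle
	return out
}
```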
@@ -174,9 +175,9 @@ type IndentOpt struct {
// MarshalSafeCollections is like Marshal except it will marshal nil maps and
// slices as '{}' and '[]' respectively instead of 'null'
func MarshalSafeCollections(v interface{}, nilSafeSlices bool, nilSafeMaps bool, indent *IndentOpt, filter *string) ([]byte, error) {
e := &encodeState{}
err := e.marshal(v, encOpts{escapeHTML: true, nilSafeSlices: nilSafeSlices, nilSafeMaps: nilSafeMaps, filter: filter})
if err != nil {
return nil, err
}
@@ -393,6 +394,9 @@ type encOpts struct {
nilSafeSlices bool
// nilSafeMaps marshals nil maps as '{}' instead of 'null'
nilSafeMaps bool
// filter matches the jsonfilter struct tag:
// a field is marshalled if it has no jsonfilter tag, or if its tag contains the filter value
filter *string
}
type encoderFunc func(e *encodeState, v reflect.Value, opts encOpts)
@@ -777,6 +781,8 @@ FieldLoop:
if f.omitEmpty && isEmptyValue(fv) {
continue
} else if opts.filter != nil && len(f.jsonfilter) > 0 && !f.jsonfilter.Contains(*opts.filter) {
continue
}
e.WriteByte(next)
next = ','
@@ -1220,15 +1226,28 @@ type field struct {
nameNonEsc string // `"` + name + `":`
nameEscHTML string // `"` + HTMLEscape(name) + `":`
tag bool
index []int
typ reflect.Type
omitEmpty bool
jsonfilter jsonfilter
quoted bool
encoder encoderFunc
}
// jsonfilter stores the value of the jsonfilter struct tag
type jsonfilter []string
func (j jsonfilter) Contains(t string) bool {
for _, tag := range j {
if t == tag {
return true
}
}
return false
}
// byIndex sorts field by index sequence.
type byIndex []field
@@ -1304,6 +1323,13 @@ func typeFields(t reflect.Type) structFields {
if !isValidTag(name) {
name = ""
}
var jsonfilter []string
jsonfilterTag := sf.Tag.Get("jsonfilter")
if jsonfilterTag != "" && jsonfilterTag != "-" {
jsonfilter = strings.Split(jsonfilterTag, ",")
}
index := make([]int, len(f.index)+1)
copy(index, f.index)
index[len(f.index)] = i
@@ -1334,12 +1360,13 @@ func typeFields(t reflect.Type) structFields {
name = sf.Name
}
field := field{
name: name,
tag: tagged,
index: index,
typ: ft,
omitEmpty: opts.Contains("omitempty"),
jsonfilter: jsonfilter,
quoted: quoted,
}
field.nameBytes = []byte(field.name)
field.equalFold = foldFunc(field.nameBytes)
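Putting the pieces together: a field tagged with `jsonfilter` is only emitted when the active filter value appears in its comma-separated tag, untagged fields are always emitted, and a nil filter disables the mechanism entirely. A small sketch of how this behaves through the extended MarshalSafeCollections signature (written as if inside the gojson package, so the function is unqualified; the expected outputs in the comments follow the field-loop logic above):

```go
type User struct {
	ID    string `json:"id"`
	Name  string `json:"name" jsonfilter:"PUBLIC,ADMIN"`
	Email string `json:"email" jsonfilter:"ADMIN"`
}

func filterExample() {
	u := User{ID: "1", Name: "Max", Email: "max@example.com"}

	admin, public := "ADMIN", "PUBLIC"

	a, _ := MarshalSafeCollections(u, true, true, nil, &admin)  // {"id":"1","name":"Max","email":"max@example.com"}
	p, _ := MarshalSafeCollections(u, true, true, nil, &public) // {"id":"1","name":"Max"}
	n, _ := MarshalSafeCollections(u, true, true, nil, nil)     // nil filter: all fields are emitted
	_, _, _ = a, p, n
}
```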


@@ -1253,6 +1253,10 @@ func TestMarshalSafeCollections(t *testing.T) {
nilMapStruct struct {
NilMap map[string]interface{} `json:"nil_map"`
}
testWithFilter struct {
Test1 string `json:"test1" jsonfilter:"FILTERONE"`
Test2 string `json:"test2" jsonfilter:"FILTERTWO"`
}
)
tests := []struct {
@@ -1271,10 +1275,12 @@ func TestMarshalSafeCollections(t *testing.T) {
{map[string]interface{}{"1": 1, "2": 2, "3": 3}, "{\"1\":1,\"2\":2,\"3\":3}"},
{pNilMap, "null"},
{nilMapStruct{}, "{\"nil_map\":{}}"},
{testWithFilter{}, "{\"test1\":\"\"}"},
}
filter := "FILTERONE"
for i, tt := range tests {
b, err := MarshalSafeCollections(tt.in, true, true, nil, &filter)
if err != nil {
t.Errorf("test %d, unexpected failure: %v", i, err)
}

View File

@@ -97,7 +97,10 @@ func equalFoldRight(s, t []byte) bool {
t = t[size:] t = t[size:]
} }
return len(t) == 0 if len(t) > 0 {
return false
}
return true
} }
// asciiEqualFold is a specialization of bytes.EqualFold for use when // asciiEqualFold is a specialization of bytes.EqualFold for use when

View File

@@ -52,7 +52,9 @@ func TestFold(t *testing.T) {
} }
func TestFoldAgainstUnicode(t *testing.T) { func TestFoldAgainstUnicode(t *testing.T) {
var buf1, buf2 []byte const bufSize = 5
buf1 := make([]byte, 0, bufSize)
buf2 := make([]byte, 0, bufSize)
var runes []rune var runes []rune
for i := 0x20; i <= 0x7f; i++ { for i := 0x20; i <= 0x7f; i++ {
runes = append(runes, rune(i)) runes = append(runes, rune(i))
@@ -94,8 +96,12 @@ func TestFoldAgainstUnicode(t *testing.T) {
continue continue
} }
for _, r2 := range runes { for _, r2 := range runes {
buf1 = append(utf8.AppendRune(append(buf1[:0], 'x'), r), 'x') buf1 := append(buf1[:0], 'x')
buf2 = append(utf8.AppendRune(append(buf2[:0], 'x'), r2), 'x') buf2 := append(buf2[:0], 'x')
buf1 = buf1[:1+utf8.EncodeRune(buf1[1:bufSize], r)]
buf2 = buf2[:1+utf8.EncodeRune(buf2[1:bufSize], r2)]
buf1 = append(buf1, 'x')
buf2 = append(buf2, 'x')
want := bytes.EqualFold(buf1, buf2) want := bytes.EqualFold(buf1, buf2)
if got := ff.fold(buf1, buf2); got != want { if got := ff.fold(buf1, buf2); got != want {
t.Errorf("%s(%q, %q) = %v; want %v", ff.name, buf1, buf2, got, want) t.Errorf("%s(%q, %q) = %v; want %v", ff.name, buf1, buf2, got, want)

View File

@@ -17,6 +17,7 @@ type GoJsonRender struct {
NilSafeSlices bool NilSafeSlices bool
NilSafeMaps bool NilSafeMaps bool
Indent *IndentOpt Indent *IndentOpt
Filter *string
} }
func (r GoJsonRender) Render(w http.ResponseWriter) error { func (r GoJsonRender) Render(w http.ResponseWriter) error {
@@ -25,7 +26,7 @@ func (r GoJsonRender) Render(w http.ResponseWriter) error {
header["Content-Type"] = []string{"application/json; charset=utf-8"} header["Content-Type"] = []string{"application/json; charset=utf-8"}
} }
jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent) jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent, r.Filter)
if err != nil { if err != nil {
panic(err) panic(err)
} }
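
A hedged sketch of passing a filter through the new GoJsonRender.Filter field; the handler and values are illustrative and not part of the change:

func writeFilteredJSON(w http.ResponseWriter, data any) {
	filter := "DETAIL" // hypothetical filter value, matched against jsonfilter tags
	r := GoJsonRender{
		Data:          data,
		NilSafeSlices: true,
		NilSafeMaps:   true,
		Indent:        nil,
		Filter:        &filter, // forwarded to MarshalSafeCollections as shown above
	}
	if err := r.Render(w); err != nil {
		panic(err)
	}
}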

View File

@@ -116,3 +116,18 @@ func TestNumberIsValid(t *testing.T) {
} }
} }
} }
func BenchmarkNumberIsValid(b *testing.B) {
s := "-61657.61667E+61673"
for i := 0; i < b.N; i++ {
isValidNumber(s)
}
}
func BenchmarkNumberIsValidRegexp(b *testing.B) {
var jsonNumberRegexp = regexp.MustCompile(`^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$`)
s := "-61657.61667E+61673"
for i := 0; i < b.N; i++ {
jsonNumberRegexp.MatchString(s)
}
}

View File

@@ -594,7 +594,7 @@ func (s *scanner) error(c byte, context string) int {
return scanError return scanError
} }
// quoteChar formats c as a quoted character literal. // quoteChar formats c as a quoted character literal
func quoteChar(c byte) string { func quoteChar(c byte) string {
// special cases - different from quoted strings // special cases - different from quoted strings
if c == '\'' { if c == '\'' {

View File

@@ -179,11 +179,9 @@ func nonSpace(b []byte) bool {
// An Encoder writes JSON values to an output stream. // An Encoder writes JSON values to an output stream.
type Encoder struct { type Encoder struct {
w io.Writer w io.Writer
err error err error
escapeHTML bool escapeHTML bool
nilSafeSlices bool
nilSafeMaps bool
indentBuf *bytes.Buffer indentBuf *bytes.Buffer
indentPrefix string indentPrefix string
@@ -204,11 +202,8 @@ func (enc *Encoder) Encode(v any) error {
if enc.err != nil { if enc.err != nil {
return enc.err return enc.err
} }
e := newEncodeState() e := newEncodeState()
defer encodeStatePool.Put(e) err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML})
err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML, nilSafeMaps: enc.nilSafeMaps, nilSafeSlices: enc.nilSafeSlices})
if err != nil { if err != nil {
return err return err
} }
@@ -236,6 +231,7 @@ func (enc *Encoder) Encode(v any) error {
if _, err = enc.w.Write(b); err != nil { if _, err = enc.w.Write(b); err != nil {
enc.err = err enc.err = err
} }
encodeStatePool.Put(e)
return err return err
} }
@@ -247,13 +243,6 @@ func (enc *Encoder) SetIndent(prefix, indent string) {
enc.indentValue = indent enc.indentValue = indent
} }
// SetNilSafeCollection specifies whether to represent nil slices and maps as
// '[]' or '{}' respectfully (flag on) instead of 'null' (default) when marshaling json.
func (enc *Encoder) SetNilSafeCollection(nilSafeSlices bool, nilSafeMaps bool) {
enc.nilSafeSlices = nilSafeSlices
enc.nilSafeMaps = nilSafeMaps
}
// SetEscapeHTML specifies whether problematic HTML characters // SetEscapeHTML specifies whether problematic HTML characters
// should be escaped inside JSON quoted strings. // should be escaped inside JSON quoted strings.
// The default behavior is to escape &, <, and > to \u0026, \u003c, and \u003e // The default behavior is to escape &, <, and > to \u0026, \u003c, and \u003e

View File

@@ -12,7 +12,6 @@ import (
"net/http" "net/http"
"net/http/httptest" "net/http/httptest"
"reflect" "reflect"
"runtime/debug"
"strings" "strings"
"testing" "testing"
) )
@@ -42,7 +41,7 @@ false
func TestEncoder(t *testing.T) { func TestEncoder(t *testing.T) {
for i := 0; i <= len(streamTest); i++ { for i := 0; i <= len(streamTest); i++ {
var buf strings.Builder var buf bytes.Buffer
enc := NewEncoder(&buf) enc := NewEncoder(&buf)
// Check that enc.SetIndent("", "") turns off indentation. // Check that enc.SetIndent("", "") turns off indentation.
enc.SetIndent(">", ".") enc.SetIndent(">", ".")
@@ -60,43 +59,6 @@ func TestEncoder(t *testing.T) {
} }
} }
func TestEncoderErrorAndReuseEncodeState(t *testing.T) {
// Disable the GC temporarily to prevent encodeState's in Pool being cleaned away during the test.
percent := debug.SetGCPercent(-1)
defer debug.SetGCPercent(percent)
// Trigger an error in Marshal with cyclic data.
type Dummy struct {
Name string
Next *Dummy
}
dummy := Dummy{Name: "Dummy"}
dummy.Next = &dummy
var buf bytes.Buffer
enc := NewEncoder(&buf)
if err := enc.Encode(dummy); err == nil {
t.Errorf("Encode(dummy) == nil; want error")
}
type Data struct {
A string
I int
}
data := Data{A: "a", I: 1}
if err := enc.Encode(data); err != nil {
t.Errorf("Marshal(%v) = %v", data, err)
}
var data2 Data
if err := Unmarshal(buf.Bytes(), &data2); err != nil {
t.Errorf("Unmarshal(%v) = %v", data2, err)
}
if data2 != data {
t.Errorf("expect: %v, but get: %v", data, data2)
}
}
var streamEncodedIndent = `0.1 var streamEncodedIndent = `0.1
"hello" "hello"
null null
@@ -115,7 +77,7 @@ false
` `
func TestEncoderIndent(t *testing.T) { func TestEncoderIndent(t *testing.T) {
var buf strings.Builder var buf bytes.Buffer
enc := NewEncoder(&buf) enc := NewEncoder(&buf)
enc.SetIndent(">", ".") enc.SetIndent(">", ".")
for _, v := range streamTest { for _, v := range streamTest {
@@ -185,7 +147,7 @@ func TestEncoderSetEscapeHTML(t *testing.T) {
`{"bar":"\"<html>foobar</html>\""}`, `{"bar":"\"<html>foobar</html>\""}`,
}, },
} { } {
var buf strings.Builder var buf bytes.Buffer
enc := NewEncoder(&buf) enc := NewEncoder(&buf)
if err := enc.Encode(tt.v); err != nil { if err := enc.Encode(tt.v); err != nil {
t.Errorf("Encode(%s): %s", tt.name, err) t.Errorf("Encode(%s): %s", tt.name, err)
@@ -347,6 +309,21 @@ func TestBlocking(t *testing.T) {
} }
} }
func BenchmarkEncoderEncode(b *testing.B) {
b.ReportAllocs()
type T struct {
X, Y string
}
v := &T{"foo", "bar"}
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if err := NewEncoder(io.Discard).Encode(v); err != nil {
b.Fatal(err)
}
}
})
}
type tokenStreamCase struct { type tokenStreamCase struct {
json string json string
expTokens []any expTokens []any
@@ -495,45 +472,3 @@ func TestHTTPDecoding(t *testing.T) {
t.Errorf("err = %v; want io.EOF", err) t.Errorf("err = %v; want io.EOF", err)
} }
} }
func TestEncoderSetNilSafeCollection(t *testing.T) {
var (
nilSlice []interface{}
pNilSlice *[]interface{}
nilMap map[string]interface{}
pNilMap *map[string]interface{}
)
for _, tt := range []struct {
name string
v interface{}
want string
rescuedWant string
}{
{"nilSlice", nilSlice, "null", "[]"},
{"nonNilSlice", []interface{}{}, "[]", "[]"},
{"sliceWithValues", []interface{}{1, 2, 3}, "[1,2,3]", "[1,2,3]"},
{"pNilSlice", pNilSlice, "null", "null"},
{"nilMap", nilMap, "null", "{}"},
{"nonNilMap", map[string]interface{}{}, "{}", "{}"},
{"mapWithValues", map[string]interface{}{"1": 1, "2": 2, "3": 3}, "{\"1\":1,\"2\":2,\"3\":3}", "{\"1\":1,\"2\":2,\"3\":3}"},
{"pNilMap", pNilMap, "null", "null"},
} {
var buf bytes.Buffer
enc := NewEncoder(&buf)
if err := enc.Encode(tt.v); err != nil {
t.Fatalf("Encode(%s): %s", tt.name, err)
}
if got := strings.TrimSpace(buf.String()); got != tt.want {
t.Errorf("Encode(%s) = %#q, want %#q", tt.name, got, tt.want)
}
buf.Reset()
enc.SetNilSafeCollection(true, true)
if err := enc.Encode(tt.v); err != nil {
t.Fatalf("SetNilSafeCollection(true) Encode(%s): %s", tt.name, err)
}
if got := strings.TrimSpace(buf.String()); got != tt.rescuedWant {
t.Errorf("SetNilSafeCollection(true) Encode(%s) = %#q, want %#q",
tt.name, got, tt.want)
}
}
}

View File

@@ -400,7 +400,7 @@ func ArrCastErr[T1 any, T2 any](arr []T1) ([]T2, error) {
if vcast, ok := any(v).(T2); ok { if vcast, ok := any(v).(T2); ok {
r[i] = vcast r[i] = vcast
} else { } else {
return nil, errors.New(fmt.Sprintf("Cannot cast element %d of type %T to type %s", i, v, *new(T2))) return nil, errors.New(fmt.Sprintf("Cannot cast element %d of type %T to type %v", i, v, *new(T2)))
} }
} }
return r, nil return r, nil
@@ -412,7 +412,7 @@ func ArrCastPanic[T1 any, T2 any](arr []T1) []T2 {
if vcast, ok := any(v).(T2); ok { if vcast, ok := any(v).(T2); ok {
r[i] = vcast r[i] = vcast
} else { } else {
panic(fmt.Sprintf("Cannot cast element %d of type %T to type %s", i, v, *new(T2))) panic(fmt.Sprintf("Cannot cast element %d of type %T to type %v", i, v, *new(T2)))
} }
} }
return r return r
@@ -467,3 +467,15 @@ func ArrayToInterface[T any](t []T) []interface{} {
} }
return res return res
} }
func JoinString(arr []string, delimiter string) string {
str := ""
for i, v := range arr {
str += v
if i < len(arr)-1 {
str += delimiter
}
}
return str
}

langext/array_test.go (new file, 12 lines)
View File

@@ -0,0 +1,12 @@
package langext
import (
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"testing"
)
func TestJoinString(t *testing.T) {
ids := []string{"1", "2", "3"}
res := JoinString(ids, ",")
tst.AssertEqual(t, res, "1,2,3")
}

View File

@@ -1,6 +1,7 @@
package langext package langext
import ( import (
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"testing" "testing"
) )
@@ -59,9 +60,3 @@ func TestBase58FlickrDecoding(t *testing.T) {
tst.AssertEqual(t, _decStr(t, Base58FlickrEncoding, "9aJCVZR"), "Hello") tst.AssertEqual(t, _decStr(t, Base58FlickrEncoding, "9aJCVZR"), "Hello")
tst.AssertEqual(t, _decStr(t, Base58FlickrEncoding, "48638rmBiUzG5NKQoX4KcuE5C8paCFACnE28F7qDx13PRtennAmYSSJQ5gJSRihf5ZDyEQS4UimtihR7uARt4wbty2fW9duTQTM9n1DwUBevreyzGwu6W4YSgrvQgCPDxsiE1mCdZsF8VEBpuHHEiJyw"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in.") tst.AssertEqual(t, _decStr(t, Base58FlickrEncoding, "48638rmBiUzG5NKQoX4KcuE5C8paCFACnE28F7qDx13PRtennAmYSSJQ5gJSRihf5ZDyEQS4UimtihR7uARt4wbty2fW9duTQTM9n1DwUBevreyzGwu6W4YSgrvQgCPDxsiE1mCdZsF8VEBpuHHEiJyw"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in.")
} }
func tst.AssertEqual(t *testing.T, actual string, expected string) {
if actual != expected {
t.Errorf("values differ: Actual: '%v', Expected: '%v'", actual, expected)
}
}

langext/baseAny.go (new file, 178 lines)
View File

@@ -0,0 +1,178 @@
package langext
import (
"crypto/rand"
"errors"
"math"
"math/big"
)
type AnyBaseConverter struct {
base uint64
charset []rune
}
func NewAnyBaseConverter(cs string) AnyBaseConverter {
rcs := []rune(cs)
return AnyBaseConverter{
base: uint64(len(rcs)),
charset: rcs,
}
}
func (bc AnyBaseConverter) Rand(rlen int) string {
biBase := big.NewInt(int64(bc.base))
randMax := big.NewInt(math.MaxInt64)
r := ""
for i := 0; i < rlen; i++ {
v, err := rand.Int(rand.Reader, randMax)
if err != nil {
panic(err)
}
r += string(bc.charset[v.Mod(v, biBase).Int64()])
}
return r
}
func (bc AnyBaseConverter) EncodeUInt64(num uint64) string {
if num == 0 {
return "0"
}
b := ""
// loop as long the num is bigger than zero
for num > 0 {
r := num % bc.base
num -= r
num /= base62Base
b += string(bc.charset[int(r)])
}
return b
}
func (bc AnyBaseConverter) DecodeUInt64(str string) (uint64, error) {
if str == "" {
return 0, errors.New("empty string")
}
result := uint64(0)
for _, v := range str {
result *= base62Base
pos := ArrFirstIndex(bc.charset, v)
if pos == -1 {
return 0, errors.New("invalid character: " + string(v))
}
result += uint64(pos)
}
return result, nil
}
func (bc AnyBaseConverter) Encode(src []byte) string {
value := new(big.Int)
value.SetBytes(src)
return bc.EncodeBigInt(value)
}
func (bc AnyBaseConverter) EncodeBigInt(src *big.Int) string {
value := new(big.Int)
value.Set(src)
isneg := value.Sign() < 0
answer := ""
if isneg {
value.Neg(value)
}
biBase := big.NewInt(int64(bc.base))
rem := new(big.Int)
for value.Sign() > 0 {
value.QuoRem(value, biBase, rem)
answer = string(bc.charset[rem.Int64()]) + answer
}
if isneg {
return "-" + answer
} else {
return answer
}
}
func (bc AnyBaseConverter) Decode(src string) ([]byte, error) {
value, err := bc.DecodeToBigInt(src)
if err != nil {
return nil, err
}
return value.Bytes(), nil
}
func (bc AnyBaseConverter) DecodeToBigInt(_src string) (*big.Int, error) {
result := new(big.Int)
result.SetInt64(0)
src := []rune(_src)
if len(src) == 0 {
return nil, errors.New("string is empty")
}
if bc.base < 2 {
return nil, errors.New("not enough digits")
}
i := 0
sign := new(big.Int)
sign.SetInt64(1)
if src[i] == '+' {
i++
} else if src[i] == '-' {
i++
sign.SetInt64(-1)
}
if i >= len(src) {
return nil, errors.New("no digits in input")
}
biBase := big.NewInt(int64(bc.base))
oldResult := new(big.Int)
for ; i < len(src); i++ {
n := ArrFirstIndex(bc.charset, src[i])
if n < 0 {
return nil, errors.New("invalid characters in input")
}
oldResult.Set(result)
result.Mul(result, biBase)
result.Add(result, big.NewInt(int64(n)))
if result.Cmp(oldResult) < 0 {
return nil, errors.New("overflow")
}
}
if sign.Cmp(big.NewInt(0)) < 0 {
result.Neg(result)
}
return result, nil
}

langext/baseAny_test.go (new file, 80 lines)
View File

@@ -0,0 +1,80 @@
package langext
import (
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"testing"
)
func _anyEncStr(bc AnyBaseConverter, v string) string {
vr := bc.Encode([]byte(v))
return vr
}
func _anyDecStr(bc AnyBaseConverter, v string) string {
vr, err := bc.Decode(v)
if err != nil {
panic(err)
}
return string(vr)
}
func TestAnyBase58DefaultEncoding(t *testing.T) {
tst.AssertEqual(t, _anyEncStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "Hello"), "9Ajdvzr")
tst.AssertEqual(t, _anyEncStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in."), "48638SMcJuah5okqPx4kCVf5d8QAdgbdNf28g7ReY13prUENNbMyssjq5GjsrJHF5zeZfqs4uJMUJHr7VbrU4XBUZ2Fw9DVtqtn9N1eXucEWSEZahXV6w4ysGSWqGdpeYTJf1MdDzTg8vfcQViifJjZX")
}
func TestAnyBase58DefaultDecoding(t *testing.T) {
tst.AssertEqual(t, _anyDecStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "9Ajdvzr"), "Hello")
tst.AssertEqual(t, _anyDecStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "48638SMcJuah5okqPx4kCVf5d8QAdgbdNf28g7ReY13prUENNbMyssjq5GjsrJHF5zeZfqs4uJMUJHr7VbrU4XBUZ2Fw9DVtqtn9N1eXucEWSEZahXV6w4ysGSWqGdpeYTJf1MdDzTg8vfcQViifJjZX"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in.")
}
func TestAnyBaseDecode(t *testing.T) {
const (
Binary = "01"
Decimal = "0123456789"
Hex = "0123456789ABCDEF"
DNA = "ACGT"
Base32 = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567"
Base58 = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
Base62 = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
Base64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
Base256 = "🚀🪐☄🛰🌌🌑🌒🌓🌔🌕🌖🌗🌘🌍🌏🌎🐉☀💻🖥💾💿😂❤😍🤣😊🙏💕😭😘👍😅👏😁🔥🥰💔💖💙😢🤔😆🙄💪😉☺👌🤗💜😔😎😇🌹🤦🎉💞✌✨🤷😱😌🌸🙌😋💗💚😏💛🙂💓🤩😄😀🖤😃💯🙈👇🎶😒🤭❣😜💋👀😪😑💥🙋😞😩😡🤪👊🥳😥🤤👉💃😳✋😚😝😴🌟😬🙃🍀🌷😻😓⭐✅🥺🌈😈🤘💦✔😣🏃💐☹🎊💘😠☝😕🌺🎂🌻😐🖕💝🙊😹🗣💫💀👑🎵🤞😛🔴😤🌼😫⚽🤙☕🏆🤫👈😮🙆🍻🍃🐶💁😲🌿🧡🎁⚡🌞🎈❌✊👋😰🤨😶🤝🚶💰🍓💢🤟🙁🚨💨🤬✈🎀🍺🤓😙💟🌱😖👶🥴▶➡❓💎💸⬇😨🌚🦋😷🕺⚠🙅😟😵👎🤲🤠🤧📌🔵💅🧐🐾🍒😗🤑🌊🤯🐷☎💧😯💆👆🎤🙇🍑❄🌴💣🐸💌📍🥀🤢👅💡💩👐📸👻🤐🤮🎼🥵🚩🍎🍊👼💍📣🥂"
)
type TestDef struct {
FromCS string
FromVal string
ToCS string
ToVal string
}
defs := []TestDef{
{Binary, "10100101011100000101010", Decimal, "5421098"},
{Decimal, "5421098", DNA, "CCAGGTGAAGGG"},
{Decimal, "5421098", DNA, "CCAGGTGAAGGG"},
{Decimal, "80085", Base256, "🪐💞🔵"},
{Hex, "48656C6C6C20576F526C5421", Base64, "SGVsbGwgV29SbFQh"},
{Base64, "SGVsbGw/gV29SbF+Qh", Base32, "CIMVWGY3B7QFO32SNRPZBB"},
{Base64, "SGVsbGw/gV29SbF+Qh", Base58, "2fUsGKQUcgQcwSqpvy6"},
{Base64, "SGVsbGw/gV29SbF+Qh", Base62, "V34nvybdQ3m3RHk9Sr"},
}
for _, def := range defs {
d1 := NewAnyBaseConverter(def.FromCS)
d2 := NewAnyBaseConverter(def.ToCS)
v1 := tst.Must(d1.Decode(def.FromVal))(t)
v2 := tst.Must(d2.Decode(def.ToVal))(t)
tst.AssertArrayEqual(t, v1, v2)
str2 := d2.Encode(v1)
tst.AssertEqual(t, str2, def.ToVal)
str1 := d1.Encode(v2)
tst.AssertEqual(t, str1, def.FromVal)
}
}

View File

@@ -1,7 +1,10 @@
package langext package langext
import "runtime/debug"
type PanicWrappedErr struct { type PanicWrappedErr struct {
panic any panic any
Stack string
} }
func (p PanicWrappedErr) Error() string { func (p PanicWrappedErr) Error() string {
@@ -15,7 +18,7 @@ func (p PanicWrappedErr) ReoveredObj() any {
func RunPanicSafe(fn func()) (err error) { func RunPanicSafe(fn func()) (err error) {
defer func() { defer func() {
if rec := recover(); rec != nil { if rec := recover(); rec != nil {
err = PanicWrappedErr{panic: rec} err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
} }
}() }()
@@ -27,7 +30,7 @@ func RunPanicSafe(fn func()) (err error) {
func RunPanicSafeR1(fn func() error) (err error) { func RunPanicSafeR1(fn func() error) (err error) {
defer func() { defer func() {
if rec := recover(); rec != nil { if rec := recover(); rec != nil {
err = PanicWrappedErr{panic: rec} err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
} }
}() }()
@@ -38,7 +41,7 @@ func RunPanicSafeR2[T1 any](fn func() (T1, error)) (r1 T1, err error) {
defer func() { defer func() {
if rec := recover(); rec != nil { if rec := recover(); rec != nil {
r1 = *new(T1) r1 = *new(T1)
err = PanicWrappedErr{panic: rec} err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
} }
}() }()
@@ -50,7 +53,7 @@ func RunPanicSafeR3[T1 any, T2 any](fn func() (T1, T2, error)) (r1 T1, r2 T2, er
if rec := recover(); rec != nil { if rec := recover(); rec != nil {
r1 = *new(T1) r1 = *new(T1)
r2 = *new(T2) r2 = *new(T2)
err = PanicWrappedErr{panic: rec} err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
} }
}() }()
@@ -63,7 +66,7 @@ func RunPanicSafeR4[T1 any, T2 any, T3 any](fn func() (T1, T2, T3, error)) (r1 T
r1 = *new(T1) r1 = *new(T1)
r2 = *new(T2) r2 = *new(T2)
r3 = *new(T3) r3 = *new(T3)
err = PanicWrappedErr{panic: rec} err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())}
} }
}() }()

View File

@@ -3,6 +3,8 @@ package mongoext
import ( import (
"go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/bsoncodec" "go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsontype"
"go.mongodb.org/mongo-driver/bson/primitive"
"gogs.mikescher.com/BlackForestBytes/goext/rfctime" "gogs.mikescher.com/BlackForestBytes/goext/rfctime"
"reflect" "reflect"
) )
@@ -24,5 +26,9 @@ func CreateGoExtBsonRegistry() *bsoncodec.Registry {
bson.PrimitiveCodecs{}.RegisterPrimitiveCodecs(rb) bson.PrimitiveCodecs{}.RegisterPrimitiveCodecs(rb)
// otherwise we get []primitve.E when unmarshalling into any
// which will result in {'key': .., 'value': ...}[] json when json-marshalling
rb.RegisterTypeMapEntry(bsontype.EmbeddedDocument, reflect.TypeOf(primitive.M{}))
return rb.Build() return rb.Build()
} }
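
Illustrative client setup showing where CreateGoExtBsonRegistry would be plugged in; the connection code below is an assumption and not part of this change. With the added type-map entry, embedded documents decoded into an `any` come back as primitive.M (a normal JSON object) instead of bson.D (a key/value array).

// hypothetical setup, assuming the standard mongo-driver options API
opts := options.Client().
	ApplyURI("mongodb://localhost:27017").
	SetRegistry(mongoext.CreateGoExtBsonRegistry())
client, err := mongo.Connect(ctx, opts) // ctx and error handling omitted in this sketch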

pagination/filter.go (new file, 29 lines)
View File

@@ -0,0 +1,29 @@
package pagination
import (
"go.mongodb.org/mongo-driver/mongo"
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)
type Filter interface {
FilterQuery() mongo.Pipeline
Pagination() (string, ct.SortDirection)
}
type dynamicFilter struct {
pipeline mongo.Pipeline
sortField string
sortDir ct.SortDirection
}
func (d dynamicFilter) FilterQuery() mongo.Pipeline {
return d.pipeline
}
func (d dynamicFilter) Pagination() (string, ct.SortDirection) {
return d.sortField, d.sortDir
}
func CreateFilter(pipeline mongo.Pipeline, sortField string, sortdir ct.SortDirection) Filter {
return dynamicFilter{pipeline: pipeline, sortField: sortField, sortDir: sortdir}
}
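
A short usage sketch for the new Filter helper; the $match stage and sort field are illustrative:

f := pagination.CreateFilter(
	mongo.Pipeline{
		bson.D{{Key: "$match", Value: bson.M{"active": true}}},
	},
	"_id",      // pagination sort field
	ct.SortASC, // pagination sort direction
)
pipeline := f.FilterQuery()  // the $match pipeline from above
field, dir := f.Pagination() // "_id", ct.SortASC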

pagination/pagination.go (new file, 16 lines)
View File

@@ -0,0 +1,16 @@
package pagination
type Pagination struct {
Page int `json:"page"` // page (first page == 1)
Limit int `json:"limit"` // max-page-size
TotalPages int `json:"totalPages"` // total page-count
TotalItems int `json:"totalItems"` // total items-count
CurrentPageCount int `json:"currntPageCount"` // item-count in current page ( == len(data) )
}
func CalcPaginationTotalPages(totalItems int, limit int) int {
if totalItems == 0 {
return 0
}
return 1 + (totalItems-1)/limit
}
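
The formula is a ceiling division: 0 items give 0 pages, otherwise 1 + (totalItems-1)/limit, so 40 items with limit 10 give 4 pages and 45 items give 5. A small test sketch (not part of the commit) that pins this down:

func TestCalcPaginationTotalPages(t *testing.T) {
	tst.AssertEqual(t, CalcPaginationTotalPages(0, 10), 0)
	tst.AssertEqual(t, CalcPaginationTotalPages(1, 10), 1)
	tst.AssertEqual(t, CalcPaginationTotalPages(10, 10), 1)
	tst.AssertEqual(t, CalcPaginationTotalPages(11, 10), 2)
	tst.AssertEqual(t, CalcPaginationTotalPages(45, 10), 5)
}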

View File

@@ -13,6 +13,7 @@ type Regex interface {
ReplaceAllFunc(haystack string, repl func(string) string) string ReplaceAllFunc(haystack string, repl func(string) string) string
RemoveAll(haystack string) string RemoveAll(haystack string) string
GroupCount() int GroupCount() int
String() string
} }
type regexWrapper struct { type regexWrapper struct {
@@ -42,6 +43,7 @@ func W(rex *regexp.Regexp) Regex {
// --------------------------------------------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------------------------------------
// IsMatch reports whether the string s contains any match of the regular expression re.
func (w *regexWrapper) IsMatch(haystack string) bool { func (w *regexWrapper) IsMatch(haystack string) bool {
return w.rex.MatchString(haystack) return w.rex.MatchString(haystack)
} }
@@ -88,6 +90,11 @@ func (w *regexWrapper) GroupCount() int {
return len(w.subnames) - 1 return len(w.subnames) - 1
} }
// String returns the source text used to compile the regular expression.
func (w *regexWrapper) String() string {
return w.rex.String()
}
// --------------------------------------------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------------------------------------
func (m RegexMatch) FullMatch() RegexMatchGroup { func (m RegexMatch) FullMatch() RegexMatchGroup {

View File

@@ -8,7 +8,7 @@ import (
func TestGroupByNameOrEmpty1(t *testing.T) { func TestGroupByNameOrEmpty1(t *testing.T) {
regex1 := W(regexp.MustCompile("0(?P<group1>A+)B(?P<group2>C+)0")) regex1 := W(regexp.MustCompile(`0(?P<group1>A+)B(?P<group2>C+)0`))
match1, ok1 := regex1.MatchFirst("Hello 0AAAABCCC0 Bye.") match1, ok1 := regex1.MatchFirst("Hello 0AAAABCCC0 Bye.")
@@ -26,7 +26,7 @@ func TestGroupByNameOrEmpty1(t *testing.T) {
func TestGroupByNameOrEmpty2(t *testing.T) { func TestGroupByNameOrEmpty2(t *testing.T) {
regex1 := W(regexp.MustCompile("0(?P<group1>A+)B(?P<group2>C+)(?P<group3>C+)?0")) regex1 := W(regexp.MustCompile(`0(?P<group1>A+)B(?P<group2>C+)(?P<group3>C+)?0`))
match1, ok1 := regex1.MatchFirst("Hello 0AAAABCCC0 Bye.") match1, ok1 := regex1.MatchFirst("Hello 0AAAABCCC0 Bye.")

View File

@@ -69,7 +69,7 @@ func (t *RFC3339Time) UnmarshalText(data []byte) error {
} }
func (t *RFC3339Time) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { func (t *RFC3339Time) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bsontype.Null { if bt == bson.TypeNull {
// we can't set nil in UnmarshalBSONValue (so we use default(struct)) // we can't set nil in UnmarshalBSONValue (so we use default(struct))
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values // Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values
// https://stackoverflow.com/questions/75167597 // https://stackoverflow.com/questions/75167597
@@ -77,7 +77,7 @@ func (t *RFC3339Time) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
*t = RFC3339Time{} *t = RFC3339Time{}
return nil return nil
} }
if bt != bsontype.DateTime { if bt != bson.TypeDateTime {
return errors.New(fmt.Sprintf("cannot unmarshal %v into RFC3339Time", bt)) return errors.New(fmt.Sprintf("cannot unmarshal %v into RFC3339Time", bt))
} }
var tt time.Time var tt time.Time

View File

@@ -69,7 +69,7 @@ func (t *RFC3339NanoTime) UnmarshalText(data []byte) error {
} }
func (t *RFC3339NanoTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { func (t *RFC3339NanoTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bsontype.Null { if bt == bson.TypeNull {
// we can't set nil in UnmarshalBSONValue (so we use default(struct)) // we can't set nil in UnmarshalBSONValue (so we use default(struct))
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values // Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values
// https://stackoverflow.com/questions/75167597 // https://stackoverflow.com/questions/75167597
@@ -77,7 +77,7 @@ func (t *RFC3339NanoTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) erro
*t = RFC3339NanoTime{} *t = RFC3339NanoTime{}
return nil return nil
} }
if bt != bsontype.DateTime { if bt != bson.TypeDateTime {
return errors.New(fmt.Sprintf("cannot unmarshal %v into RFC3339NanoTime", bt)) return errors.New(fmt.Sprintf("cannot unmarshal %v into RFC3339NanoTime", bt))
} }
var tt time.Time var tt time.Time

View File

@@ -2,6 +2,7 @@ package rfctime
import ( import (
"encoding/json" "encoding/json"
"gogs.mikescher.com/BlackForestBytes/goext/timeext"
"gogs.mikescher.com/BlackForestBytes/goext/tst" "gogs.mikescher.com/BlackForestBytes/goext/tst"
"testing" "testing"
"time" "time"
@@ -13,7 +14,7 @@ func TestRoundtrip(t *testing.T) {
Value RFC3339NanoTime `json:"v"` Value RFC3339NanoTime `json:"v"`
} }
val1 := NewRFC3339Nano(time.Unix(0, 1675951556820915171)) val1 := NewRFC3339Nano(time.Unix(0, 1675951556820915171).In(timeext.TimezoneBerlin))
w1 := Wrap{val1} w1 := Wrap{val1}
jstr1, err := json.Marshal(w1) jstr1, err := json.Marshal(w1)

View File

@@ -39,7 +39,7 @@ func HashSqliteSchema(ctx context.Context, schemaStr string) (string, error) {
return HashSqliteDatabase(ctx, db) return HashSqliteDatabase(ctx, db)
} }
func HashSqliteDatabase(ctx context.Context, db DB) (string, error) { func HashSqliteDatabase(ctx context.Context, db Queryable) (string, error) {
ss, err := CreateSqliteDatabaseSchemaString(ctx, db) ss, err := CreateSqliteDatabaseSchemaString(ctx, db)
if err != nil { if err != nil {
return "", err return "", err
@@ -50,7 +50,7 @@ func HashSqliteDatabase(ctx context.Context, db DB) (string, error) {
return hex.EncodeToString(cs[:]), nil return hex.EncodeToString(cs[:]), nil
} }
func CreateSqliteDatabaseSchemaString(ctx context.Context, db DB) (string, error) { func CreateSqliteDatabaseSchemaString(ctx context.Context, db Queryable) (string, error) {
type colInfo struct { type colInfo struct {
Name string `db:"name"` Name string `db:"name"`

View File

@@ -7,24 +7,40 @@ import (
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
) )
type TxStatus string
const (
TxStatusInitial TxStatus = "INITIAL"
TxStatusActive TxStatus = "ACTIVE"
TxStatusComitted TxStatus = "COMMITTED"
TxStatusRollback TxStatus = "ROLLBACK"
)
type Tx interface { type Tx interface {
Rollback() error Rollback() error
Commit() error Commit() error
Status() TxStatus
Exec(ctx context.Context, sql string, prep PP) (sql.Result, error) Exec(ctx context.Context, sql string, prep PP) (sql.Result, error)
Query(ctx context.Context, sql string, prep PP) (*sqlx.Rows, error) Query(ctx context.Context, sql string, prep PP) (*sqlx.Rows, error)
} }
type transaction struct { type transaction struct {
tx *sqlx.Tx tx *sqlx.Tx
id uint16 id uint16
lstr []Listener lstr []Listener
status TxStatus
execCtr int
queryCtr int
} }
func NewTransaction(xtx *sqlx.Tx, txid uint16, lstr []Listener) Tx { func NewTransaction(xtx *sqlx.Tx, txid uint16, lstr []Listener) Tx {
return &transaction{ return &transaction{
tx: xtx, tx: xtx,
id: txid, id: txid,
lstr: lstr, lstr: lstr,
status: TxStatusInitial,
execCtr: 0,
queryCtr: 0,
} }
} }
@@ -38,6 +54,10 @@ func (tx *transaction) Rollback() error {
result := tx.tx.Rollback() result := tx.tx.Rollback()
if result == nil {
tx.status = TxStatusRollback
}
for _, v := range tx.lstr { for _, v := range tx.lstr {
v.PostTxRollback(tx.id, result) v.PostTxRollback(tx.id, result)
} }
@@ -55,6 +75,10 @@ func (tx *transaction) Commit() error {
result := tx.tx.Commit() result := tx.tx.Commit()
if result == nil {
tx.status = TxStatusComitted
}
for _, v := range tx.lstr { for _, v := range tx.lstr {
v.PostTxRollback(tx.id, result) v.PostTxRollback(tx.id, result)
} }
@@ -73,6 +97,10 @@ func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Re
res, err := tx.tx.NamedExecContext(ctx, sqlstr, prep) res, err := tx.tx.NamedExecContext(ctx, sqlstr, prep)
if tx.status == TxStatusInitial && err == nil {
tx.status = TxStatusActive
}
for _, v := range tx.lstr { for _, v := range tx.lstr {
v.PostExec(langext.Ptr(tx.id), origsql, sqlstr, prep) v.PostExec(langext.Ptr(tx.id), origsql, sqlstr, prep)
} }
@@ -94,6 +122,10 @@ func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx
rows, err := sqlx.NamedQueryContext(ctx, tx.tx, sqlstr, prep) rows, err := sqlx.NamedQueryContext(ctx, tx.tx, sqlstr, prep)
if tx.status == TxStatusInitial && err == nil {
tx.status = TxStatusActive
}
for _, v := range tx.lstr { for _, v := range tx.lstr {
v.PostQuery(langext.Ptr(tx.id), origsql, sqlstr, prep) v.PostQuery(langext.Ptr(tx.id), origsql, sqlstr, prep)
} }
@@ -103,3 +135,11 @@ func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx
} }
return rows, nil return rows, nil
} }
func (tx *transaction) Status() TxStatus {
return tx.status
}
func (tx *transaction) Traffic() (int, int) {
return tx.execCtr, tx.queryCtr
}
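
A hedged sketch of consuming the new Status method, assuming the package is imported as sq and tx is a Tx obtained elsewhere:

// commit only if at least one statement actually ran in this transaction
func commitIfActive(tx sq.Tx) error {
	if tx.Status() == sq.TxStatusActive {
		return tx.Commit()
	}
	// TxStatusInitial: nothing was executed yet, a rollback is cheap and safe
	return tx.Rollback()
}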

View File

@@ -27,10 +27,12 @@ func (a *AtomicBool) Get() bool {
return a.v return a.v
} }
func (a *AtomicBool) Set(value bool) { func (a *AtomicBool) Set(value bool) bool {
a.lock.Lock() a.lock.Lock()
defer a.lock.Unlock() defer a.lock.Unlock()
oldValue := a.v
a.v = value a.v = value
for k, v := range a.listener { for k, v := range a.listener {
@@ -42,6 +44,8 @@ func (a *AtomicBool) Set(value bool) {
delete(a.listener, k) delete(a.listener, k)
} }
} }
return oldValue
} }
func (a *AtomicBool) Wait(waitFor bool) { func (a *AtomicBool) Wait(waitFor bool) {
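
Because Set now returns the previous value, it can double as a one-shot guard; a small sketch, where flag (an already-constructed *AtomicBool) and initialize are hypothetical names:

if wasSet := flag.Set(true); !wasSet {
	// this caller flipped the flag from false to true, so it runs the setup exactly once
	initialize()
}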

View File

@@ -71,12 +71,12 @@ func SupportsColors() bool {
} }
} }
var term256Regex = regexp.MustCompile("(?i)-256(color)?$") var term256Regex = regexp.MustCompile(`(?i)-256(color)?$`)
if term256Regex.MatchString(termenv) { if term256Regex.MatchString(termenv) {
return true return true
} }
var termBasicRegex = regexp.MustCompile("(?i)^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux") var termBasicRegex = regexp.MustCompile(`(?i)^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux`)
if termBasicRegex.MatchString(termenv) { if termBasicRegex.MatchString(termenv) {
return true return true

View File

@@ -1,6 +1,7 @@
package termext package termext
import ( import (
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"math/rand" "math/rand"
"testing" "testing"
) )
@@ -32,9 +33,3 @@ func TestColor(t *testing.T) {
tst.AssertEqual(t, CleanString(Gray("test")), "test") tst.AssertEqual(t, CleanString(Gray("test")), "test")
tst.AssertEqual(t, CleanString(White("test")), "test") tst.AssertEqual(t, CleanString(White("test")), "test")
} }
func tst.AssertEqual(t *testing.T, actual string, expected string) {
if actual != expected {
t.Errorf("values differ: Actual: '%v', Expected: '%v'", actual, expected)
}
}

timeext/calendarweek.go (new file, 28 lines)
View File

@@ -0,0 +1,28 @@
package timeext
import "time"
func WeekStart(year, week int) time.Time {
// https://stackoverflow.com/a/52303730/1761622
// Start from the middle of the year:
t := time.Date(year, 7, 1, 0, 0, 0, 0, time.UTC)
// Roll back to Monday:
if wd := t.Weekday(); wd == time.Sunday {
t = t.AddDate(0, 0, -6)
} else {
t = t.AddDate(0, 0, -int(wd)+1)
}
// Difference in weeks:
_, w := t.ISOWeek()
t = t.AddDate(0, 0, (week-w)*7)
return t
}
func WeekEnd(year, week int) time.Time {
return WeekStart(year, week).AddDate(0, 0, 7).Add(time.Duration(-1))
}

View File

@@ -0,0 +1,25 @@
package timeext
import (
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"testing"
"time"
)
func TestWeekStart(t *testing.T) {
tst.AssertEqual(t, WeekStart(2018, 1).Format(time.RFC3339Nano), "2018-01-01T00:00:00Z")
tst.AssertEqual(t, WeekStart(2018, 2).Format(time.RFC3339Nano), "2018-01-08T00:00:00Z")
tst.AssertEqual(t, WeekStart(2019, 1).Format(time.RFC3339Nano), "2018-12-31T00:00:00Z")
tst.AssertEqual(t, WeekStart(2019, 2).Format(time.RFC3339Nano), "2019-01-07T00:00:00Z")
}
func TestWeekEnd(t *testing.T) {
tst.AssertEqual(t, WeekEnd(2018, 1).Format(time.RFC3339Nano), "2018-01-07T23:59:59.999999999Z")
tst.AssertEqual(t, WeekEnd(2018, 2).Format(time.RFC3339Nano), "2018-01-14T23:59:59.999999999Z")
tst.AssertEqual(t, WeekEnd(2019, 1).Format(time.RFC3339Nano), "2019-01-06T23:59:59.999999999Z")
tst.AssertEqual(t, WeekEnd(2019, 2).Format(time.RFC3339Nano), "2019-01-13T23:59:59.999999999Z")
}

View File

@@ -7,56 +7,56 @@ import (
func TestParseDurationShortString(t *testing.T) { func TestParseDurationShortString(t *testing.T) {
tst.AssertPDSSEqual(t, FromSeconds(1), "1s") assertPDSSEqual(t, FromSeconds(1), "1s")
tst.AssertPDSSEqual(t, FromSeconds(1), "1sec") assertPDSSEqual(t, FromSeconds(1), "1sec")
tst.AssertPDSSEqual(t, FromSeconds(1), "1second") assertPDSSEqual(t, FromSeconds(1), "1second")
tst.AssertPDSSEqual(t, FromSeconds(1), "1seconds") assertPDSSEqual(t, FromSeconds(1), "1seconds")
tst.AssertPDSSEqual(t, FromSeconds(100), "100second") assertPDSSEqual(t, FromSeconds(100), "100second")
tst.AssertPDSSEqual(t, FromSeconds(100), "100seconds") assertPDSSEqual(t, FromSeconds(100), "100seconds")
tst.AssertPDSSEqual(t, FromSeconds(1883639.77), "1883639.77second") assertPDSSEqual(t, FromSeconds(1883639.77), "1883639.77second")
tst.AssertPDSSEqual(t, FromSeconds(1883639.77), "1883639.77seconds") assertPDSSEqual(t, FromSeconds(1883639.77), "1883639.77seconds")
tst.AssertPDSSEqual(t, FromSeconds(50), "50s") assertPDSSEqual(t, FromSeconds(50), "50s")
tst.AssertPDSSEqual(t, FromSeconds(50), "50sec") assertPDSSEqual(t, FromSeconds(50), "50sec")
tst.AssertPDSSEqual(t, FromSeconds(1), "1second") assertPDSSEqual(t, FromSeconds(1), "1second")
tst.AssertPDSSEqual(t, FromSeconds(50), "50seconds") assertPDSSEqual(t, FromSeconds(50), "50seconds")
tst.AssertPDSSEqual(t, FromMinutes(10), "10m") assertPDSSEqual(t, FromMinutes(10), "10m")
tst.AssertPDSSEqual(t, FromMinutes(10), "10min") assertPDSSEqual(t, FromMinutes(10), "10min")
tst.AssertPDSSEqual(t, FromMinutes(1), "1minute") assertPDSSEqual(t, FromMinutes(1), "1minute")
tst.AssertPDSSEqual(t, FromMinutes(10), "10minutes") assertPDSSEqual(t, FromMinutes(10), "10minutes")
tst.AssertPDSSEqual(t, FromMinutes(10.5), "10.5minutes") assertPDSSEqual(t, FromMinutes(10.5), "10.5minutes")
tst.AssertPDSSEqual(t, FromMilliseconds(100), "100ms") assertPDSSEqual(t, FromMilliseconds(100), "100ms")
tst.AssertPDSSEqual(t, FromMilliseconds(100), "100milliseconds") assertPDSSEqual(t, FromMilliseconds(100), "100milliseconds")
tst.AssertPDSSEqual(t, FromMilliseconds(100), "100millisecond") assertPDSSEqual(t, FromMilliseconds(100), "100millisecond")
tst.AssertPDSSEqual(t, FromNanoseconds(99235), "99235ns") assertPDSSEqual(t, FromNanoseconds(99235), "99235ns")
tst.AssertPDSSEqual(t, FromNanoseconds(99235), "99235nanoseconds") assertPDSSEqual(t, FromNanoseconds(99235), "99235nanoseconds")
tst.AssertPDSSEqual(t, FromNanoseconds(99235), "99235nanosecond") assertPDSSEqual(t, FromNanoseconds(99235), "99235nanosecond")
tst.AssertPDSSEqual(t, FromMicroseconds(99235), "99235us") assertPDSSEqual(t, FromMicroseconds(99235), "99235us")
tst.AssertPDSSEqual(t, FromMicroseconds(99235), "99235microseconds") assertPDSSEqual(t, FromMicroseconds(99235), "99235microseconds")
tst.AssertPDSSEqual(t, FromMicroseconds(99235), "99235microsecond") assertPDSSEqual(t, FromMicroseconds(99235), "99235microsecond")
tst.AssertPDSSEqual(t, FromHours(1), "1h") assertPDSSEqual(t, FromHours(1), "1h")
tst.AssertPDSSEqual(t, FromHours(1), "1hour") assertPDSSEqual(t, FromHours(1), "1hour")
tst.AssertPDSSEqual(t, FromHours(2), "2hours") assertPDSSEqual(t, FromHours(2), "2hours")
tst.AssertPDSSEqual(t, FromDays(1), "1d") assertPDSSEqual(t, FromDays(1), "1d")
tst.AssertPDSSEqual(t, FromDays(1), "1day") assertPDSSEqual(t, FromDays(1), "1day")
tst.AssertPDSSEqual(t, FromDays(10), "10days") assertPDSSEqual(t, FromDays(10), "10days")
tst.AssertPDSSEqual(t, FromDays(1), "1days") assertPDSSEqual(t, FromDays(1), "1days")
tst.AssertPDSSEqual(t, FromDays(10), "10day") assertPDSSEqual(t, FromDays(10), "10day")
tst.AssertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d10m") assertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d10m")
tst.AssertPDSSEqual(t, FromDays(1)+FromMinutes(10)+FromSeconds(200), "1d10m200sec") assertPDSSEqual(t, FromDays(1)+FromMinutes(10)+FromSeconds(200), "1d10m200sec")
tst.AssertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d:10m") assertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d:10m")
tst.AssertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d 10m") assertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d 10m")
tst.AssertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d,10m") assertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d,10m")
tst.AssertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d, 10m") assertPDSSEqual(t, FromDays(1)+FromMinutes(10), "1d, 10m")
tst.AssertPDSSEqual(t, FromDays(1)+FromSeconds(1000), "1d 1000seconds") assertPDSSEqual(t, FromDays(1)+FromSeconds(1000), "1d 1000seconds")
tst.AssertPDSSEqual(t, FromDays(1), "86400s") assertPDSSEqual(t, FromDays(1), "86400s")
} }
func assertPDSSEqual(t *testing.T, expected time.Duration, fmt string) { func assertPDSSEqual(t *testing.T, expected time.Duration, fmt string) {

View File

@@ -22,6 +22,17 @@ func TimeToDatePart(t time.Time, tz *time.Location) time.Time {
return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location()) return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
} }
// TimeToDayStart returns a timestamp at the start of the day which contains t (= 00:00:00)
func TimeToDayStart(t time.Time, tz *time.Location) time.Time {
t = t.In(tz)
return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
}
// TimeToDayEnd returns a timestamp at the end of the day which contains t (= 23:59:59)
func TimeToDayEnd(t time.Time, tz *time.Location) time.Time {
return TimeToDayStart(t, tz).AddDate(0, 0, 1).Add(-1)
}
// TimeToWeekStart returns a timestamp at the start of the week which contains t (= Monday 00:00:00) // TimeToWeekStart returns a timestamp at the start of the week which contains t (= Monday 00:00:00)
func TimeToWeekStart(t time.Time, tz *time.Location) time.Time { func TimeToWeekStart(t time.Time, tz *time.Location) time.Time {
t = TimeToDatePart(t, tz) t = TimeToDatePart(t, tz)
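
Example of the new day-boundary helpers; the timestamp and timezone are chosen only for illustration (TimezoneBerlin is used elsewhere in this changeset):

now := time.Date(2023, 11, 8, 18, 30, 0, 0, timeext.TimezoneBerlin)
dayStart := timeext.TimeToDayStart(now, timeext.TimezoneBerlin) // 2023-11-08 00:00:00 +01:00
dayEnd := timeext.TimeToDayEnd(now, timeext.TimezoneBerlin)     // 2023-11-08 23:59:59.999999999 +01:00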

View File

@@ -2,23 +2,73 @@ package tst
import ( import (
"encoding/hex" "encoding/hex"
"reflect"
"runtime/debug" "runtime/debug"
"testing" "testing"
) )
func AssertEqual[T comparable](t *testing.T, actual T, expected T) { func AssertEqual[T comparable](t *testing.T, actual T, expected T) {
t.Helper()
if actual != expected { if actual != expected {
t.Errorf("values differ: Actual: '%v', Expected: '%v'", actual, expected) t.Errorf("values differ: Actual: '%v', Expected: '%v'", actual, expected)
} }
} }
func AssertArrayEqual[T comparable](t *testing.T, actual []T, expected []T) {
t.Helper()
if len(actual) != len(expected) {
t.Errorf("values differ: Actual: '%v', Expected: '%v' (len %d <> %d)", actual, expected, len(actual), len(expected))
return
}
for i := 0; i < len(actual); i++ {
if actual[i] != expected[i] {
t.Errorf("values differ: Actual: '%v', Expected: '%v' (at index %d)", actual, expected, i)
return
}
}
}
func AssertNotEqual[T comparable](t *testing.T, actual T, expected T) { func AssertNotEqual[T comparable](t *testing.T, actual T, expected T) {
t.Helper()
if actual == expected { if actual == expected {
t.Errorf("values do not differ: Actual: '%v', Expected: '%v'", actual, expected) t.Errorf("values do not differ: Actual: '%v', Expected: '%v'", actual, expected)
} }
} }
func AssertDeepEqual[T any](t *testing.T, actual T, expected T) {
t.Helper()
if !reflect.DeepEqual(actual, expected) {
t.Errorf("values differ: Actual: '%v', Expected: '%v'", actual, expected)
}
}
func AssertSetDeepEqual[T any](t *testing.T, actual []T, expected []T) {
t.Helper()
if len(actual) != len(expected) {
t.Errorf("values differ in length: Actual (n=%d): '%v', Expected (n=%d): '%v'", len(actual), actual, len(expected), expected)
}
for _, a := range expected {
found := false
for _, b := range actual {
found = found || reflect.DeepEqual(a, b)
}
if !found {
t.Errorf("values differ: Element '%v' not found. Actual: '%v', Expected: '%v'", a, actual, expected)
return
}
}
}
func AssertNotDeepEqual[T any](t *testing.T, actual T, expected T) {
t.Helper()
if reflect.DeepEqual(actual, expected) {
t.Errorf("values do not differ: Actual: '%v', Expected: '%v'", actual, expected)
}
}
func AssertDeRefEqual[T comparable](t *testing.T, actual *T, expected T) { func AssertDeRefEqual[T comparable](t *testing.T, actual *T, expected T) {
t.Helper()
if actual == nil { if actual == nil {
t.Errorf("values differ: Actual: NIL, Expected: '%v'", expected) t.Errorf("values differ: Actual: NIL, Expected: '%v'", expected)
} }
@@ -28,6 +78,7 @@ func AssertDeRefEqual[T comparable](t *testing.T, actual *T, expected T) {
} }
func AssertPtrEqual[T comparable](t *testing.T, actual *T, expected *T) { func AssertPtrEqual[T comparable](t *testing.T, actual *T, expected *T) {
t.Helper()
if actual == nil && expected == nil { if actual == nil && expected == nil {
return return
} }
@@ -47,6 +98,7 @@ func AssertPtrEqual[T comparable](t *testing.T, actual *T, expected *T) {
} }
func AssertHexEqual(t *testing.T, expected string, actual []byte) { func AssertHexEqual(t *testing.T, expected string, actual []byte) {
t.Helper()
actualStr := hex.EncodeToString(actual) actualStr := hex.EncodeToString(actual)
if actualStr != expected { if actualStr != expected {
t.Errorf("values differ: Actual: '%v', Expected: '%v'", actualStr, expected) t.Errorf("values differ: Actual: '%v', Expected: '%v'", actualStr, expected)
@@ -54,18 +106,21 @@ func AssertHexEqual(t *testing.T, expected string, actual []byte) {
} }
func AssertTrue(t *testing.T, value bool) { func AssertTrue(t *testing.T, value bool) {
t.Helper()
if !value { if !value {
t.Error("value should be true\n" + string(debug.Stack())) t.Error("value should be true\n" + string(debug.Stack()))
} }
} }
func AssertFalse(t *testing.T, value bool) { func AssertFalse(t *testing.T, value bool) {
t.Helper()
if value { if value {
t.Error("value should be false\n" + string(debug.Stack())) t.Error("value should be false\n" + string(debug.Stack()))
} }
} }
func AssertNoErr(t *testing.T, anerr error) { func AssertNoErr(t *testing.T, anerr error) {
t.Helper()
if anerr != nil { if anerr != nil {
t.Error("Function returned an error: " + anerr.Error() + "\n" + string(debug.Stack())) t.Error("Function returned an error: " + anerr.Error() + "\n" + string(debug.Stack()))
} }

tst/must.go (new file, 21 lines)
View File

@@ -0,0 +1,21 @@
package tst
import (
"runtime/debug"
"testing"
)
// Must can b used to AssertNoErr of an (T, err) function
//
// Usage:
//
// input := "123.8"
// value := tst.Must(strconv.Atoi(input))(t)
func Must[T any](v T, anerr error) func(t *testing.T) T {
return func(t *testing.T) T {
if anerr != nil {
t.Error("Function returned an error: " + anerr.Error() + "\n" + string(debug.Stack()))
}
return v
}
}
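
A hedged example of Must wrapping a (value, error) call inside a test; the parsed value is illustrative:

func TestParsePort(t *testing.T) {
	port := tst.Must(strconv.Atoi("8080"))(t) // marks the test as failed if Atoi returns an error
	tst.AssertEqual(t, port, 8080)
}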

View File

@@ -5,6 +5,7 @@ import (
"go.mongodb.org/mongo-driver/bson/bsontype" "go.mongodb.org/mongo-driver/bson/bsontype"
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect" "reflect"
) )
@@ -43,6 +44,8 @@ type Coll[TData any] struct {
implDataTypeMap map[reflect.Type]map[string]fullTypeRef // dynamic list of fields of TData implementations (only if TData is an interface) implDataTypeMap map[reflect.Type]map[string]fullTypeRef // dynamic list of fields of TData implementations (only if TData is an interface)
customDecoder *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface) customDecoder *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface)
isInterfaceDataType bool // true if TData is an interface (not a struct) isInterfaceDataType bool // true if TData is an interface (not a struct)
unmarshalHooks []func(d TData) TData // called for every object after unmarshalling
extraModPipeline mongo.Pipeline // appended to pipelines after filter/limit/skip/sort, used for $lookup, $set, $unset, $project, etc
} }
func (c *Coll[TData]) Collection() *mongo.Collection { func (c *Coll[TData]) Collection() *mongo.Collection {
@@ -53,6 +56,18 @@ func (c *Coll[TData]) Name() string {
return c.coll.Name() return c.coll.Name()
} }
func (c *Coll[TData]) Indexes() mongo.IndexView {
return c.coll.Indexes()
}
func (c *Coll[TData]) Drop(ctx context.Context) error {
err := c.coll.Drop(ctx)
if err != nil {
return exerr.Wrap(err, "failed to drop collection").Str("collection", c.Name()).Build()
}
return nil
}
func (c *Coll[TData]) WithDecodeFunc(cdf func(ctx context.Context, dec Decodable) (TData, error), example TData) *Coll[TData] { func (c *Coll[TData]) WithDecodeFunc(cdf func(ctx context.Context, dec Decodable) (TData, error), example TData) *Coll[TData] {
c.EnsureInitializedReflection(example) c.EnsureInitializedReflection(example)
@@ -61,26 +76,30 @@ func (c *Coll[TData]) WithDecodeFunc(cdf func(ctx context.Context, dec Decodable
return c return c
} }
func (c *Coll[TData]) Indexes() mongo.IndexView { func (c *Coll[TData]) WithUnmarshalHook(fn func(d TData) TData) *Coll[TData] {
return c.coll.Indexes() c.unmarshalHooks = append(c.unmarshalHooks, fn)
return c
} }
func (c *Coll[TData]) Drop(ctx context.Context) error { func (c *Coll[TData]) WithModifyingPipeline(p mongo.Pipeline) *Coll[TData] {
return c.coll.Drop(ctx) c.extraModPipeline = append(c.extraModPipeline, p...)
return c
} }
func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) { func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) {
valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary) valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary)
if err != nil { if err != nil {
return ct.CursorToken{}, err return ct.CursorToken{}, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build()
} }
valueSeconary := "" valueSeconary := ""
if fieldSecondary != nil && dirSecondary != nil { if fieldSecondary != nil && dirSecondary != nil {
valueSeconary, err = c.getFieldValueAsTokenString(lastEntity, *fieldSecondary) valueSeconary, err = c.getFieldValueAsTokenString(lastEntity, *fieldSecondary)
if err != nil { if err != nil {
return ct.CursorToken{}, err return ct.CursorToken{}, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build()
} }
} }
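
A hedged sketch of chaining the new builder methods; the User type, the hook body and the $set stage are illustrative, and coll is assumed to be a *wmo.Coll[User] created via the package's usual constructor:

coll = coll.
	WithUnmarshalHook(func(u User) User {
		// hypothetical post-decode normalization applied to every returned document
		if u.DisplayName == "" {
			u.DisplayName = u.Username
		}
		return u
	}).
	WithModifyingPipeline(mongo.Pipeline{
		// appended after filter/limit/skip/sort on every query, e.g. for computed fields
		bson.D{{Key: "$set", Value: bson.M{"nameLower": bson.M{"$toLower": "$name"}}}},
	})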

View File

@@ -2,53 +2,88 @@ package wmo
import ( import (
"context" "context"
"go.mongodb.org/mongo-driver/bson"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
) )
func (c *Coll[TData]) decodeSingle(ctx context.Context, dec Decodable) (TData, error) { func (c *Coll[TData]) decodeSingle(ctx context.Context, dec Decodable) (TData, error) {
var res TData
var err error
if c.customDecoder != nil { if c.customDecoder != nil {
res, err = (*c.customDecoder)(ctx, dec)
return (*c.customDecoder)(ctx, dec)
} else {
var res TData
err := dec.Decode(&res)
if err != nil { if err != nil {
return *new(TData), err return *new(TData), exerr.Wrap(err, "failed to decode single entity with custom-decoder").Type("decoder", *c.customDecoder).Build()
}
} else {
err = dec.Decode(&res)
if err != nil {
return *new(TData), exerr.Wrap(err, "failed to decode single entity").Type("target-type", res).Build()
} }
return res, nil
} }
for _, hook := range c.unmarshalHooks {
res = hook(res)
}
return res, nil
} }
func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData, error) { func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData, error) {
res := make([]TData, 0, cursor.RemainingBatchLength())
if c.customDecoder != nil { if c.customDecoder != nil {
res := make([]TData, 0, cursor.RemainingBatchLength())
for cursor.Next(ctx) { for cursor.Next(ctx) {
entry, err := (*c.customDecoder)(ctx, cursor) entry, err := (*c.customDecoder)(ctx, cursor)
if err != nil { if err != nil {
return nil, err return nil, exerr.Wrap(err, "failed to decode entity with custom-decoder").Type("decoder", *c.customDecoder).Build()
} }
res = append(res, entry) res = append(res, entry)
} }
} else {
err := cursor.All(ctx, &res)
if err != nil {
return nil, exerr.Wrap(err, "failed to batch-decode entity").Type("target-type", res).Build()
}
}
return res, nil for i := 0; i < len(res); i++ {
for _, hook := range c.unmarshalHooks {
res[i] = hook(res[i])
}
}
return res, nil
}
func (c *Coll[TData]) decodeSingleOrRequery(ctx context.Context, dec Decodable) (TData, error) {
if c.extraModPipeline == nil {
// simple case, we can just decode the result and return it
return c.decodeSingle(ctx, dec)
} else { } else {
res := make([]TData, 0, cursor.RemainingBatchLength()) // annyoing case, we have a extraModPipeline and need to re-query the document such that the extraModPipeline is applied...
err := cursor.All(ctx, &res) type genDoc struct {
ID any `bson:"_id"`
}
var res genDoc
err := dec.Decode(&res)
if err != nil { if err != nil {
return nil, err return *new(TData), exerr.Wrap(err, "failed to ID-decode entity").Build()
} }
return res, nil v, err := c.findOneInternal(ctx, bson.M{"_id": res.ID}, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "failed to re-query entity").Any("_id", res.ID).Build()
}
return *v, nil
} }
} }

View File

@@ -1,87 +0,0 @@
package wmo
import (
"errors"
"go.mongodb.org/mongo-driver/bson"
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)
func CreatePagination[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) {
cond := bson.A{}
sort := bson.D{}
valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary)
if err != nil {
return nil, err
}
if sortPrimary == ct.SortASC {
// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary
cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if sortPrimary == ct.SortDESC {
// We sort DESC on <field> - so we want all entries older ($lt) than the $primary
cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {
valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary)
if err != nil {
return nil, err
}
if *sortSecondary == ct.SortASC {
// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}},
}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if *sortSecondary == ct.SortDESC {
// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}},
}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
}
pipeline := make([]bson.D, 0, 3)
if token.Mode == ct.CTMStart {
// no gt/lt condition
} else if token.Mode == ct.CTMNormal {
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}})
} else if token.Mode == ct.CTMEnd {
// false
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}})
} else {
return nil, errors.New("unknown ct mode: " + string(token.Mode))
}
pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}})
if pageSize != nil {
pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}})
}
return pipeline, nil
}

View File

@@ -2,35 +2,42 @@ package wmo
import ( import (
"context" "context"
"errors"
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options" "go.mongodb.org/mongo-driver/mongo/options"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
) )
func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) { func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) {
pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
if err != nil { if err != nil {
return nil, err return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
} }
res, err := c.decodeAll(ctx, cursor) res, err := c.decodeAll(ctx, cursor)
if err != nil { if err != nil {
return nil, err return nil, exerr.Wrap(err, "failed to decode values").Build()
} }
return res, nil return res, nil
} }
func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) { func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) {
pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
if err != nil { if err != nil {
return nil, err return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
} }
if cursor.Next(ctx) { if cursor.Next(ctx) {
v, err := c.decodeSingle(ctx, cursor) v, err := c.decodeSingle(ctx, cursor)
if err != nil { if err != nil {
return nil, err return nil, exerr.Wrap(err, "failed to decode single value").Build()
} }
return &v, nil return &v, nil
} }
@@ -39,18 +46,21 @@ func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeli
} }
func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) { func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) {
pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
if err != nil { if err != nil {
return *new(TData), err return *new(TData), exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
} }
if cursor.Next(ctx) { if cursor.Next(ctx) {
v, err := c.decodeSingle(ctx, cursor) v, err := c.decodeSingle(ctx, cursor)
if err != nil { if err != nil {
return *new(TData), err return *new(TData), exerr.Wrap(err, "failed to decode single value").Build()
} }
return v, nil return v, nil
} }
return *new(TData), errors.New("no document in result") return *new(TData), exerr.Wrap(mongo.ErrNoDocuments, "no document in result").Build()
} }
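A hedged usage sketch of the aggregation wrappers above; the wmo import path and the MyDoc document type are assumptions for illustration, only Aggregate itself is taken from the diff:

package main

import (
	"context"
	"fmt"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	"gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

type MyDoc struct {
	ID     string `bson:"_id"`
	Active bool   `bson:"active"`
}

func listActive(ctx context.Context, coll *wmo.Coll[MyDoc]) ([]MyDoc, error) {
	// any extraModPipeline stages registered on the collection are appended automatically
	pipeline := mongo.Pipeline{
		bson.D{{Key: "$match", Value: bson.M{"active": true}}},
	}
	docs, err := coll.Aggregate(ctx, pipeline)
	if err != nil {
		// the error now carries the pipeline, options and collection name as exerr metadata
		return nil, err
	}
	fmt.Printf("found %d active documents\n", len(docs))
	return docs, nil
}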

View File

@@ -4,12 +4,13 @@ import (
"context" "context"
"go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
) )
func (c *Coll[TData]) DeleteOneByID(ctx context.Context, id EntityID) error { func (c *Coll[TData]) DeleteOneByID(ctx context.Context, id EntityID) error {
_, err := c.coll.DeleteOne(ctx, bson.M{"_id": id}) _, err := c.coll.DeleteOne(ctx, bson.M{"_id": id})
if err != nil { if err != nil {
return err return exerr.Wrap(err, "mongo-query[delete-one-by-id] failed").Id("id", id).Str("collection", c.Name()).Build()
} }
return nil return nil
@@ -18,7 +19,7 @@ func (c *Coll[TData]) DeleteOneByID(ctx context.Context, id EntityID) error {
func (c *Coll[TData]) DeleteOne(ctx context.Context, filterQuery bson.M) error { func (c *Coll[TData]) DeleteOne(ctx context.Context, filterQuery bson.M) error {
_, err := c.coll.DeleteOne(ctx, filterQuery) _, err := c.coll.DeleteOne(ctx, filterQuery)
if err != nil { if err != nil {
return err return exerr.Wrap(err, "mongo-query[delete-one] failed").Any("filterQuery", filterQuery).Str("collection", c.Name()).Build()
} }
return nil return nil
@@ -27,7 +28,7 @@ func (c *Coll[TData]) DeleteOne(ctx context.Context, filterQuery bson.M) error {
func (c *Coll[TData]) DeleteMany(ctx context.Context, filterQuery bson.M) (*mongo.DeleteResult, error) { func (c *Coll[TData]) DeleteMany(ctx context.Context, filterQuery bson.M) (*mongo.DeleteResult, error) {
res, err := c.coll.DeleteMany(ctx, filterQuery) res, err := c.coll.DeleteMany(ctx, filterQuery)
if err != nil { if err != nil {
return nil, err return nil, exerr.Wrap(err, "mongo-query[delete-many] failed").Any("filterQuery", filterQuery).Str("collection", c.Name()).Build()
} }
return res, nil return res, nil

View File

@@ -4,58 +4,25 @@ import (
"context" "context"
"go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options" "gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
) )
func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) { func (c *Coll[TData]) Find(ctx context.Context, filter bson.M) ([]TData, error) {
mongoRes := c.coll.FindOne(ctx, filter)
return c.decodeSingle(ctx, mongoRes) pipeline := mongo.Pipeline{}
} pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}})
func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) { pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
mongoRes := c.coll.FindOne(ctx, filter)
res, err := c.decodeSingle(ctx, mongoRes) cursor, err := c.coll.Aggregate(ctx, pipeline)
if err == mongo.ErrNoDocuments {
return nil, nil
}
if err != nil { if err != nil {
return nil, err return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
return &res, nil
}
func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) {
mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id})
return c.decodeSingle(ctx, mongoRes)
}
func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) {
mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id})
res, err := c.decodeSingle(ctx, mongoRes)
if err == mongo.ErrNoDocuments {
return nil, nil
}
if err != nil {
return nil, err
}
return &res, nil
}
func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) {
cursor, err := c.coll.Find(ctx, filter, opts...)
if err != nil {
return nil, err
} }
res, err := c.decodeAll(ctx, cursor) res, err := c.decodeAll(ctx, cursor)
if err != nil { if err != nil {
return nil, err return nil, exerr.Wrap(err, "failed to decode values").Build()
} }
return res, nil return res, nil
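Find is now backed by an aggregation (so the collection's extra pipeline stages are applied) and no longer accepts *options.FindOptions. A hedged usage sketch; the wmo import path and MyDoc are assumptions:

package main

import (
	"context"

	"go.mongodb.org/mongo-driver/bson"
	"gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

type MyDoc struct {
	ID     string `bson:"_id"`
	Active bool   `bson:"active"`
}

// findActive fetches all matching documents; sorting and limiting now have to
// happen via Aggregate, List or Paginate, since FindOptions are gone here.
func findActive(ctx context.Context, coll *wmo.Coll[MyDoc]) ([]MyDoc, error) {
	return coll.Find(ctx, bson.M{"active": true})
}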

wmo/queryFindOne.go Normal file (93 lines)
View File

@@ -0,0 +1,93 @@
package wmo
import (
"context"
"errors"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) {
r, err := c.findOneInternal(ctx, filter, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Build()
}
return *r, nil
}
func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) {
r, err := c.findOneInternal(ctx, filter, true)
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Str("collection", c.Name()).Build()
}
return r, nil
}
func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) {
r, err := c.findOneInternal(ctx, bson.M{"_id": id}, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one-by-id] failed").Id("id", id).Str("collection", c.Name()).Build()
}
return *r, nil
}
func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) {
r, err := c.findOneInternal(ctx, bson.M{"_id": id}, true)
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one-opt-by-id] failed").Id("id", id).Str("collection", c.Name()).Build()
}
return r, nil
}
func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowNull bool) (*TData, error) {
if len(c.extraModPipeline) == 0 {
// simple case, use mongo FindOne
mongoRes := c.coll.FindOne(ctx, filter)
res, err := c.decodeSingle(ctx, mongoRes)
if allowNull && errors.Is(err, mongo.ErrNoDocuments) {
return nil, nil
}
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one] failed").Any("filter", filter).Str("collection", c.Name()).Build()
}
return &res, nil
} else {
// complex case: we have one or more additional pipeline stages, convert to aggregation
pipeline := mongo.Pipeline{}
pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}})
pipeline = append(pipeline, bson.D{{Key: "$limit", Value: 1}})
pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
cursor, err := c.coll.Aggregate(ctx, pipeline)
if err != nil {
return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
if cursor.Next(ctx) {
v, err := c.decodeSingle(ctx, cursor)
if err != nil {
return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
return &v, nil
} else if allowNull {
return nil, nil
} else {
return nil, exerr.Wrap(mongo.ErrNoDocuments, "mongo-aggregation [find-one] returned no documents").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
}
}
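The Opt variants differ from their strict counterparts only in how a missing document is handled. A hedged sketch (MyDoc and the import path are illustrative):

package main

import (
	"context"
	"fmt"

	"go.mongodb.org/mongo-driver/bson"
	"gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

type MyDoc struct {
	ID    string `bson:"_id"`
	Email string `bson:"email"`
}

func lookup(ctx context.Context, coll *wmo.Coll[MyDoc], email string) error {
	// FindOneOpt maps "no document" to (nil, nil) ...
	optDoc, err := coll.FindOneOpt(ctx, bson.M{"email": email})
	if err != nil {
		return err
	}
	if optDoc == nil {
		fmt.Println("not found")
		return nil
	}

	// ... while FindOne treats a missing document as an error
	doc, err := coll.FindOne(ctx, bson.M{"email": email})
	if err != nil {
		return err
	}
	fmt.Println(doc.ID)
	return nil
}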

View File

@@ -4,20 +4,42 @@ import (
"context" "context"
"go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
) )
func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, error) { func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, error) {
insRes, err := c.coll.InsertOne(ctx, valueIn) insRes, err := c.coll.InsertOne(ctx, valueIn)
if err != nil { if err != nil {
return *new(TData), err return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build()
} }
mongoRes := c.coll.FindOne(ctx, bson.M{"_id": insRes.InsertedID}) r, err := c.findOneInternal(ctx, bson.M{"_id": insRes.InsertedID}, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build()
}
return *r, nil
}
return c.decodeSingle(ctx, mongoRes) // InsertOneUnchecked behaves the same as InsertOne, but allows arbitrary data to be inserted (valueIn is any instead of TData)
func (c *Coll[TData]) InsertOneUnchecked(ctx context.Context, valueIn any) (TData, error) {
insRes, err := c.coll.InsertOne(ctx, valueIn)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build()
}
r, err := c.findOneInternal(ctx, bson.M{"_id": insRes.InsertedID}, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build()
}
return *r, nil
} }
func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) { func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) {
return c.coll.InsertMany(ctx, langext.ArrayToInterface(valueIn)) insRes, err := c.coll.InsertMany(ctx, langext.ArrayToInterface(valueIn))
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[insert-many] failed").Int("len(valueIn)", len(valueIn)).Str("collection", c.Name()).Build()
}
return insRes, nil
} }
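InsertOneUnchecked is intended for cases where the value to insert is not a complete TData (for example a bson.M or a partial struct) but the caller still wants the stored document decoded back as TData. A hedged sketch (MyDoc and the import path are illustrative):

package main

import (
	"context"

	"go.mongodb.org/mongo-driver/bson"
	"gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

type MyDoc struct {
	ID      string `bson:"_id"`
	Name    string `bson:"name"`
	Counter int    `bson:"counter"`
}

// insert arbitrary data (here a bson.M without the counter field) and get the
// full MyDoc back - it is re-read via findOneInternal after the insert.
func insertPartial(ctx context.Context, coll *wmo.Coll[MyDoc]) (MyDoc, error) {
	return coll.InsertOneUnchecked(ctx, bson.M{"_id": "doc-1", "name": "example"})
}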

View File

@@ -5,6 +5,7 @@ import (
"go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
) )
func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) { func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) {
@@ -33,23 +34,34 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
sortDirSecondary = nil sortDirSecondary = nil
} }
paginationPipeline, err := CreatePagination(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) paginationPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
if err != nil { if err != nil {
return nil, ct.CursorToken{}, err return nil, ct.CursorToken{}, exerr.
Wrap(err, "failed to create pagination").
WithType(exerr.TypeCursorTokenDecode).
Str("collection", c.Name()).
Any("inTok", inTok).
Any("sortPrimary", sortPrimary).
Any("sortDirPrimary", sortDirPrimary).
Any("sortSecondary", sortSecondary).
Any("sortDirSecondary", sortDirSecondary).
Any("pageSize", pageSize).
Build()
} }
pipeline = append(pipeline, paginationPipeline...) pipeline = append(pipeline, paginationPipeline...)
pipeline = append(pipeline, c.extraModPipeline...)
cursor, err := c.coll.Aggregate(ctx, pipeline) cursor, err := c.coll.Aggregate(ctx, pipeline)
if err != nil { if err != nil {
return nil, ct.CursorToken{}, err return nil, ct.CursorToken{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
} }
// fast branch // fast branch
if pageSize == nil { if pageSize == nil {
entries, err := c.decodeAll(ctx, cursor) entries, err := c.decodeAll(ctx, cursor)
if err != nil { if err != nil {
return nil, ct.CursorToken{}, err return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to all-decode entities").Build()
} }
return entries, ct.End(), nil return entries, ct.End(), nil
} }
@@ -59,7 +71,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
var entry TData var entry TData
entry, err = c.decodeSingle(ctx, cursor) entry, err = c.decodeSingle(ctx, cursor)
if err != nil { if err != nil {
return nil, ct.CursorToken{}, err return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to decode entity").Build()
} }
entities = append(entities, entry) entities = append(entities, entry)
} }
@@ -74,34 +86,128 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize) nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize)
if err != nil { if err != nil {
return nil, ct.CursorToken{}, err return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to create (out)-token").Build()
} }
return entities, nextToken, nil return entities, nextToken, nil
} }
type countRes struct { func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, error) {
Count int64 `bson:"c"` type countRes struct {
} Count int64 `bson:"c"`
}
func (c *Coll[TData]) Count(ctx context.Context, filter ct.Filter) (int64, error) {
pipeline := filter.FilterQuery() pipeline := filter.FilterQuery()
pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}}) pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}})
cursor, err := c.coll.Aggregate(ctx, pipeline) cursor, err := c.coll.Aggregate(ctx, pipeline)
if err != nil { if err != nil {
return 0, err return 0, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
} }
if cursor.Next(ctx) { if cursor.Next(ctx) {
v := countRes{} v := countRes{}
err = cursor.Decode(&v) err = cursor.Decode(&v)
if err != nil { if err != nil {
return 0, err return 0, exerr.Wrap(err, "failed to decode entity").Build()
} }
return v.Count, nil return v.Count, nil
} }
return 0, nil return 0, nil
} }
func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, int64, error) {
// NOTE: Possible optimization: Cache count in CursorToken, then fetch count only on first page.
count, err := c.Count(ctx, filter)
if err != nil {
return nil, ct.CursorToken{}, 0, err
}
data, token, err := c.List(ctx, filter, pageSize, inTok)
if err != nil {
return nil, ct.CursorToken{}, 0, err
}
return data, token, count, nil
}
func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) {
cond := bson.A{}
sort := bson.D{}
valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary)
if err != nil {
return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build()
}
if sortPrimary == ct.SortASC {
// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary
cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if sortPrimary == ct.SortDESC {
// We sort DESC on <field> - so we want all entries older ($lt) than the $primary
cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {
valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary)
if err != nil {
return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build()
}
if *sortSecondary == ct.SortASC {
// the conflict-resolution condition: for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}},
}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if *sortSecondary == ct.SortDESC {
// the conflict-resolution condition: for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}},
}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
}
pipeline := make([]bson.D, 0, 3)
if token.Mode == ct.CTMStart {
// no gt/lt condition
} else if token.Mode == ct.CTMNormal {
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}})
} else if token.Mode == ct.CTMEnd {
// always-false condition: the query shall return no entries
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}})
} else {
return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build()
}
pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}})
if pageSize != nil {
pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}})
}
return pipeline, nil
}
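A hedged sketch of paging through a collection with List; ct.Start() as the initial token and checking next.Mode == ct.CTMEnd for the last page are assumptions based on the constants visible above, everything else mirrors the signatures in the diff:

package main

import (
	"context"

	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

type MyDoc struct {
	ID    string `bson:"_id"`
	CDate string `bson:"cdate"`
}

// listAll pages through the whole collection in chunks of 100 entries.
func listAll(ctx context.Context, coll *wmo.Coll[MyDoc], filter ct.Filter) ([]MyDoc, error) {
	out := make([]MyDoc, 0)
	tok := ct.Start() // assumption: constructor for an initial CTMStart token
	for {
		page, next, err := coll.List(ctx, filter, langext.Ptr(100), tok)
		if err != nil {
			return nil, err
		}
		out = append(out, page...)
		if next.Mode == ct.CTMEnd { // assumption: end-of-data is signalled via the token mode
			return out, nil
		}
		tok = next
	}
}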

wmo/queryPaginate.go Normal file (86 lines)
View File

@@ -0,0 +1,86 @@
package wmo
import (
"context"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
pag "gogs.mikescher.com/BlackForestBytes/goext/pagination"
)
func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.Filter, page int, limit *int) ([]TData, pag.Pagination, error) {
type totalCountResult struct {
Count int `bson:"count"`
}
if page < 1 {
page = 1
}
pipelineSort := mongo.Pipeline{}
pipelineFilter := mongo.Pipeline{}
pf1 := "_id"
pd1 := ct.SortASC
if filter != nil {
pipelineFilter = filter.FilterQuery()
pf1, pd1 = filter.Pagination()
}
if pd1 == ct.SortASC {
pipelineSort = append(pipelineSort, bson.D{{Key: "$sort", Value: bson.D{{Key: pf1, Value: +1}}}})
} else if pd1 == ct.SortDESC {
pipelineSort = append(pipelineSort, bson.D{{Key: "$sort", Value: bson.D{{Key: pf1, Value: -1}}}})
}
pipelinePaginate := mongo.Pipeline{}
if limit != nil {
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$skip", Value: *limit * (page - 1)}})
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$limit", Value: *limit}})
} else {
page = 1
}
pipelineCount := mongo.Pipeline{}
pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}})
pipelineList := langext.ArrConcat(mongo.Pipeline{}, pipelineFilter, pipelinePaginate, c.extraModPipeline)
pipelineTotalCount := langext.ArrConcat(mongo.Pipeline{}, pipelineFilter, pipelineCount)
cursorList, err := c.coll.Aggregate(ctx, pipelineList)
if err != nil {
return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build()
}
entities, err := c.decodeAll(ctx, cursorList)
if err != nil {
return nil, pag.Pagination{}, exerr.Wrap(err, "failed to all-decode entities").Build()
}
cursorTotalCount, err := c.coll.Aggregate(ctx, pipelineTotalCount)
if err != nil {
return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build()
}
var tcRes totalCountResult
if cursorTotalCount.Next(ctx) {
err = cursorTotalCount.Decode(&tcRes)
if err != nil {
return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode mongo-aggregation $count result").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build()
}
} else {
tcRes.Count = 0 // no entries in DB
}
paginationObj := pag.Pagination{
Page: page,
Limit: langext.Coalesce(limit, tcRes.Count),
TotalPages: pag.CalcPaginationTotalPages(tcRes.Count, langext.Coalesce(limit, tcRes.Count)),
TotalItems: tcRes.Count,
CurrentPageCount: len(entities),
}
return entities, paginationObj, nil
}
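Paginate is the page+limit counterpart to the token-based List. A hedged usage sketch (MyDoc and the import paths are illustrative); note that page is 1-based and a nil filter means no $match stages and the default _id ASC sort:

package main

import (
	"context"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination"
	"gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

type MyDoc struct {
	ID   string `bson:"_id"`
	Name string `bson:"name"`
}

// secondPage fetches page 2 with 25 items per page. The returned pag.Pagination
// carries Page, Limit, TotalPages, TotalItems and CurrentPageCount.
func secondPage(ctx context.Context, coll *wmo.Coll[MyDoc]) ([]MyDoc, pag.Pagination, error) {
	return coll.Paginate(ctx, nil, 2, langext.Ptr(25))
}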

View File

@@ -5,18 +5,30 @@ import (
"go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options" "go.mongodb.org/mongo-driver/mongo/options"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
) )
func (c *Coll[TData]) FindOneAndUpdate(ctx context.Context, filterQuery bson.M, updateQuery bson.M) (TData, error) { func (c *Coll[TData]) FindOneAndUpdate(ctx context.Context, filterQuery bson.M, updateQuery bson.M) (TData, error) {
mongoRes := c.coll.FindOneAndUpdate(ctx, filterQuery, updateQuery, options.FindOneAndUpdate().SetReturnDocument(options.After)) mongoRes := c.coll.FindOneAndUpdate(ctx, filterQuery, updateQuery, options.FindOneAndUpdate().SetReturnDocument(options.After))
if err := mongoRes.Err(); err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one-and-update] failed").
Str("collection", c.Name()).
Any("filterQuery", filterQuery).
Any("updateQuery", updateQuery).
Build()
}
return c.decodeSingle(ctx, mongoRes) return c.decodeSingleOrRequery(ctx, mongoRes)
} }
func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQuery bson.M) error { func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQuery bson.M) error {
_, err := c.coll.UpdateOne(ctx, filterQuery, updateQuery) _, err := c.coll.UpdateOne(ctx, filterQuery, updateQuery)
if err != nil { if err != nil {
return err return exerr.Wrap(err, "mongo-query[update-one] failed").
Str("collection", c.Name()).
Any("filterQuery", filterQuery).
Any("updateQuery", updateQuery).
Build()
} }
return nil return nil
@@ -25,7 +37,11 @@ func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQ
func (c *Coll[TData]) UpdateOneByID(ctx context.Context, id EntityID, updateQuery bson.M) error { func (c *Coll[TData]) UpdateOneByID(ctx context.Context, id EntityID, updateQuery bson.M) error {
_, err := c.coll.UpdateOne(ctx, bson.M{"_id": id}, updateQuery) _, err := c.coll.UpdateOne(ctx, bson.M{"_id": id}, updateQuery)
if err != nil { if err != nil {
return err return exerr.Wrap(err, "mongo-query[update-one-by-id] failed").
Str("collection", c.Name()).
Id("id", id).
Any("updateQuery", updateQuery).
Build()
} }
return nil return nil
@@ -34,7 +50,11 @@ func (c *Coll[TData]) UpdateOneByID(ctx context.Context, id EntityID, updateQuer
func (c *Coll[TData]) UpdateMany(ctx context.Context, filterQuery bson.M, updateQuery bson.M) (*mongo.UpdateResult, error) { func (c *Coll[TData]) UpdateMany(ctx context.Context, filterQuery bson.M, updateQuery bson.M) (*mongo.UpdateResult, error) {
res, err := c.coll.UpdateMany(ctx, filterQuery, updateQuery) res, err := c.coll.UpdateMany(ctx, filterQuery, updateQuery)
if err != nil { if err != nil {
return nil, err return nil, exerr.Wrap(err, "mongo-query[update-many] failed").
Str("collection", c.Name()).
Any("filterQuery", filterQuery).
Any("updateQuery", updateQuery).
Build()
} }
return res, nil return res, nil
@@ -43,14 +63,23 @@ func (c *Coll[TData]) UpdateMany(ctx context.Context, filterQuery bson.M, update
func (c *Coll[TData]) ReplaceOne(ctx context.Context, filterQuery bson.M, value TData) error { func (c *Coll[TData]) ReplaceOne(ctx context.Context, filterQuery bson.M, value TData) error {
_, err := c.coll.UpdateOne(ctx, filterQuery, bson.M{"$set": value}) _, err := c.coll.UpdateOne(ctx, filterQuery, bson.M{"$set": value})
if err != nil { if err != nil {
return err return exerr.Wrap(err, "mongo-query[replace-one] failed").
Str("collection", c.Name()).
Any("filterQuery", filterQuery).
Build()
} }
return nil return nil
} }
func (c *Coll[TData]) FindOneAndReplace(ctx context.Context, filterQuery bson.M, value TData) (TData, error) { func (c *Coll[TData]) FindOneAndReplace(ctx context.Context, filterQuery bson.M, value TData) (TData, error) {
mongoRes := c.coll.FindOneAndUpdate(ctx, filterQuery, bson.M{"$set": value}, options.FindOneAndUpdate().SetReturnDocument(options.After)) mongoRes := c.coll.FindOneAndReplace(ctx, filterQuery, value, options.FindOneAndReplace().SetReturnDocument(options.After))
if err := mongoRes.Err(); err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one-and-update] failed").
Str("collection", c.Name()).
Any("filterQuery", filterQuery).
Build()
}
return c.decodeSingle(ctx, mongoRes) return c.decodeSingleOrRequery(ctx, mongoRes)
} }
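Both FindOneAndUpdate and FindOneAndReplace run with ReturnDocument(options.After), i.e. they return the document state after the modification. A hedged sketch (MyDoc and the import path are illustrative):

package main

import (
	"context"

	"go.mongodb.org/mongo-driver/bson"
	"gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

type MyDoc struct {
	ID      string `bson:"_id"`
	Counter int    `bson:"counter"`
}

// bump increments the counter and returns the updated document.
func bump(ctx context.Context, coll *wmo.Coll[MyDoc], id string) (MyDoc, error) {
	return coll.FindOneAndUpdate(ctx, bson.M{"_id": id}, bson.M{"$inc": bson.M{"counter": 1}})
}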

View File

@@ -1,7 +1,7 @@
package wmo package wmo
import ( import (
"errors" "gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext" "gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/reflectext" "gogs.mikescher.com/BlackForestBytes/goext/reflectext"
"reflect" "reflect"
@@ -25,7 +25,7 @@ func (c *Coll[TData]) EnsureInitializedReflection(v TData) {
m := make(map[string]fullTypeRef) m := make(map[string]fullTypeRef)
c.initFields("", rval, m, make([]int, 0)) c.initFields("", rval.Type(), m, make([]int, 0), make([]reflect.Type, 0))
c.implDataTypeMap[rval.Type()] = m c.implDataTypeMap[rval.Type()] = m
} }
@@ -50,20 +50,16 @@ func (c *Coll[TData]) init() {
c.implDataTypeMap = make(map[reflect.Type]map[string]fullTypeRef) c.implDataTypeMap = make(map[reflect.Type]map[string]fullTypeRef)
v := reflect.ValueOf(example) v := reflect.ValueOf(example)
c.initFields("", v, c.dataTypeMap, make([]int, 0)) c.initFields("", v.Type(), c.dataTypeMap, make([]int, 0), make([]reflect.Type, 0))
} }
} }
func (c *Coll[TData]) initFields(prefix string, rval reflect.Value, m map[string]fullTypeRef, idxarr []int) { func (c *Coll[TData]) initFields(prefix string, rtyp reflect.Type, m map[string]fullTypeRef, idxarr []int, typesInPath []reflect.Type) {
rtyp := rval.Type()
for i := 0; i < rtyp.NumField(); i++ { for i := 0; i < rtyp.NumField(); i++ {
rsfield := rtyp.Field(i) rsfield := rtyp.Field(i)
rvfield := rval.Field(i)
if !rsfield.IsExported() { if !rsfield.IsExported() {
continue continue
@@ -91,21 +87,21 @@ func (c *Coll[TData]) initFields(prefix string, rval reflect.Value, m map[string
newIdxArr := langext.ArrCopy(idxarr) newIdxArr := langext.ArrCopy(idxarr)
newIdxArr = append(newIdxArr, i) newIdxArr = append(newIdxArr, i)
if langext.InArray("inline", bsontags) && rvfield.Kind() == reflect.Struct { if langext.InArray("inline", bsontags) && rsfield.Type.Kind() == reflect.Struct {
// pass-through field // pass-through field
c.initFields(prefix, rvfield, m, newIdxArr) c.initFields(prefix, rsfield.Type, m, newIdxArr, typesInPath)
} else { } else {
if rvfield.Type().Kind() == reflect.Pointer { if rsfield.Type.Kind() == reflect.Pointer {
m[fullKey] = fullTypeRef{ m[fullKey] = fullTypeRef{
IsPointer: true, IsPointer: true,
RealType: rvfield.Type(), RealType: rsfield.Type,
Kind: rvfield.Type().Elem().Kind(), Kind: rsfield.Type.Elem().Kind(),
Type: rvfield.Type().Elem(), Type: rsfield.Type.Elem(),
UnderlyingType: reflectext.Underlying(rvfield.Type().Elem()), UnderlyingType: reflectext.Underlying(rsfield.Type.Elem()),
Name: rsfield.Name, Name: rsfield.Name,
Index: newIdxArr, Index: newIdxArr,
} }
@@ -114,20 +110,37 @@ func (c *Coll[TData]) initFields(prefix string, rval reflect.Value, m map[string
m[fullKey] = fullTypeRef{ m[fullKey] = fullTypeRef{
IsPointer: false, IsPointer: false,
RealType: rvfield.Type(), RealType: rsfield.Type,
Kind: rvfield.Type().Kind(), Kind: rsfield.Type.Kind(),
Type: rvfield.Type(), Type: rsfield.Type,
UnderlyingType: reflectext.Underlying(rvfield.Type()), UnderlyingType: reflectext.Underlying(rsfield.Type),
Name: rsfield.Name, Name: rsfield.Name,
Index: newIdxArr, Index: newIdxArr,
} }
} }
if rvfield.Kind() == reflect.Struct { if rsfield.Type.Kind() == reflect.Struct {
c.initFields(fullKey+".", rvfield, m, newIdxArr) c.initFields(fullKey+".", rsfield.Type, m, newIdxArr, typesInPath)
} }
if rsfield.Type.Kind() == reflect.Pointer && rsfield.Type.Elem().Kind() == reflect.Struct {
innerType := rsfield.Type.Elem()
// check if there is recursion
recursion := false
for _, typ := range typesInPath {
recursion = recursion || (typ == innerType)
}
if !recursion {
// Remember all types already seen on this path before dereferencing the pointer, to prevent endless recursion
newTypesInPath := make([]reflect.Type, len(typesInPath))
copy(newTypesInPath, typesInPath)
newTypesInPath = append(newTypesInPath, rtyp)
c.initFields(fullKey+".", innerType, m, newIdxArr, newTypesInPath)
}
}
} }
} }
@@ -138,7 +151,7 @@ func (c *Coll[TData]) getTokenValueAsMongoType(value string, fieldName string) (
fref, err := c.getFieldType(fieldName) fref, err := c.getFieldType(fieldName)
if err != nil { if err != nil {
return nil, err return nil, exerr.Wrap(err, "failed to get-field-type").Str("fieldName", fieldName).Build()
} }
pss := reflectext.PrimitiveStringSerializer{} pss := reflectext.PrimitiveStringSerializer{}
@@ -151,7 +164,7 @@ func (c *Coll[TData]) getFieldValueAsTokenString(entity TData, fieldName string)
realValue, err := c.getFieldValue(entity, fieldName) realValue, err := c.getFieldValue(entity, fieldName)
if err != nil { if err != nil {
return "", err return "", exerr.Wrap(err, "failed to get-field-value").Str("fieldName", fieldName).Build()
} }
pss := reflectext.PrimitiveStringSerializer{} pss := reflectext.PrimitiveStringSerializer{}
@@ -169,14 +182,14 @@ func (c *Coll[TData]) getFieldType(fieldName string) (fullTypeRef, error) {
} }
} }
return fullTypeRef{}, errors.New("unknown field: '" + fieldName + "' (in any impl)") return fullTypeRef{}, exerr.New(exerr.TypeMongoReflection, "unknown field: '"+fieldName+"' (in any impl)").Str("fieldName", fieldName).Build()
} else { } else {
if r, ok := c.dataTypeMap[fieldName]; ok { if r, ok := c.dataTypeMap[fieldName]; ok {
return r, nil return r, nil
} else { } else {
return fullTypeRef{}, errors.New("unknown field: '" + fieldName + "'") return fullTypeRef{}, exerr.New(exerr.TypeMongoReflection, "unknown field: '"+fieldName+"'").Str("fieldName", fieldName).Build()
} }
} }
@@ -196,10 +209,10 @@ func (c *Coll[TData]) getFieldValue(data TData, fieldName string) (any, error) {
rval := reflect.ValueOf(data) rval := reflect.ValueOf(data)
return rval.FieldByIndex(fref.Index).Interface(), nil return rval.FieldByIndex(fref.Index).Interface(), nil
} else { } else {
return nil, errors.New("unknown bson field '" + fieldName + "' in type '" + rval.Type().String() + "'") return nil, exerr.New(exerr.TypeMongoReflection, "unknown bson field '"+fieldName+"' in type '"+rval.Type().String()+"'").Str("fieldName", fieldName).Type("rval", rval).Build()
} }
} else { } else {
return nil, errors.New("unknown TData type: '" + rval.Type().String() + "'") return nil, exerr.New(exerr.TypeMongoReflection, "unknown TData type: '"+rval.Type().String()+"'").Type("rval", rval).Build()
} }
} else { } else {
@@ -208,7 +221,7 @@ func (c *Coll[TData]) getFieldValue(data TData, fieldName string) (any, error) {
rval := reflect.ValueOf(data) rval := reflect.ValueOf(data)
return rval.FieldByIndex(fref.Index).Interface(), nil return rval.FieldByIndex(fref.Index).Interface(), nil
} else { } else {
return nil, errors.New("unknown bson field '" + fieldName + "'") return nil, exerr.New(exerr.TypeMongoReflection, "unknown bson field '"+fieldName+"'").Str("fieldName", fieldName).Build()
} }
} }
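The switch from reflect.Value to reflect.Type together with the typesInPath list is what allows self-referential pointer fields (see the RecurseiveType test below) without endless recursion. A minimal standalone sketch of the same guard, independent of wmo:

package main

import (
	"fmt"
	"reflect"
)

type node struct {
	Value int   `bson:"value"`
	Next  *node `bson:"next"`
}

// walk descends into struct fields by type and stops descending into a
// pointer-to-struct whose target type was already seen on the current path.
func walk(prefix string, t reflect.Type, typesInPath []reflect.Type) {
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		fmt.Println(prefix + f.Tag.Get("bson"))

		if f.Type.Kind() == reflect.Pointer && f.Type.Elem().Kind() == reflect.Struct {
			inner := f.Type.Elem()
			recursion := false
			for _, seen := range typesInPath {
				recursion = recursion || (seen == inner)
			}
			if !recursion {
				walk(prefix+f.Tag.Get("bson")+".", inner, append(typesInPath, t))
			}
		}
	}
}

func main() {
	walk("", reflect.TypeOf(node{}), nil) // prints: value, next, next.value, next.next - then stops
}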

View File

@@ -23,6 +23,9 @@ func TestReflectionGetFieldType(t *testing.T) {
Sub struct { Sub struct {
A string `bson:"a"` A string `bson:"a"`
} `bson:"sub"` } `bson:"sub"`
SubPtr *struct {
A string `bson:"a"`
} `bson:"subPtr"`
Str string `bson:"str"` Str string `bson:"str"`
Ptr *int `bson:"ptr"` Ptr *int `bson:"ptr"`
MDate rfctime.RFC3339NanoTime `bson:"mdate"` MDate rfctime.RFC3339NanoTime `bson:"mdate"`
@@ -43,6 +46,11 @@ func TestReflectionGetFieldType(t *testing.T) {
}{ }{
A: "2", A: "2",
}, },
SubPtr: &struct {
A string `bson:"a"`
}{
A: "4",
},
Str: "3", Str: "3",
Ptr: langext.Ptr(4), Ptr: langext.Ptr(4),
MDate: t1, MDate: t1,
@@ -82,6 +90,12 @@ func TestReflectionGetFieldType(t *testing.T) {
tst.AssertEqual(t, gft("sub.a").IsPointer, false) tst.AssertEqual(t, gft("sub.a").IsPointer, false)
tst.AssertEqual(t, gfv("sub.a").(string), "2") tst.AssertEqual(t, gfv("sub.a").(string), "2")
tst.AssertEqual(t, gft("subPtr.a").Kind.String(), "string")
tst.AssertEqual(t, gft("subPtr.a").Type.String(), "string")
tst.AssertEqual(t, gft("subPtr.a").Name, "A")
tst.AssertEqual(t, gft("subPtr.a").IsPointer, false)
tst.AssertEqual(t, gfv("subPtr.a").(string), "4")
tst.AssertEqual(t, gft("str").Kind.String(), "string") tst.AssertEqual(t, gft("str").Kind.String(), "string")
tst.AssertEqual(t, gft("str").Type.String(), "string") tst.AssertEqual(t, gft("str").Type.String(), "string")
tst.AssertEqual(t, gft("str").Name, "Str") tst.AssertEqual(t, gft("str").Name, "Str")
@@ -99,16 +113,25 @@ func TestReflectionGetTokenValueAsMongoType(t *testing.T) {
type IDType string type IDType string
type RecurseiveType struct {
Other int `bson:"other"`
Inner *RecurseiveType `bson:"inner"`
}
type TestData struct { type TestData struct {
ID IDType `bson:"_id"` ID IDType `bson:"_id"`
CDate time.Time `bson:"cdate"` CDate time.Time `bson:"cdate"`
Sub struct { Sub struct {
A string `bson:"a"` A string `bson:"a"`
} `bson:"sub"` } `bson:"sub"`
SubPtr *struct {
A string `bson:"a"`
} `bson:"subPtr"`
Str string `bson:"str"` Str string `bson:"str"`
Ptr *int `bson:"ptr"` Ptr *int `bson:"ptr"`
Num int `bson:"num"` Num int `bson:"num"`
MDate rfctime.RFC3339NanoTime `bson:"mdate"` MDate rfctime.RFC3339NanoTime `bson:"mdate"`
Rec RecurseiveType `bson:"rec"`
} }
coll := W[TestData](&mongo.Collection{}) coll := W[TestData](&mongo.Collection{})
@@ -130,6 +153,9 @@ func TestReflectionGetTokenValueAsMongoType(t *testing.T) {
} }
tst.AssertEqual(t, gtvasmt("hello", "str").(string), "hello") tst.AssertEqual(t, gtvasmt("hello", "str").(string), "hello")
tst.AssertEqual(t, gtvasmt("hello", "sub.a").(string), "hello")
tst.AssertEqual(t, gtvasmt("hello", "subPtr.a").(string), "hello")
tst.AssertEqual(t, gtvasmt("4", "rec.other").(int), 4)
tst.AssertEqual(t, gtvasmt("4", "num").(int), 4) tst.AssertEqual(t, gtvasmt("4", "num").(int), 4)
tst.AssertEqual(t, gtvasmt("asdf", "_id").(IDType), "asdf") tst.AssertEqual(t, gtvasmt("asdf", "_id").(IDType), "asdf")
tst.AssertEqual(t, gtvasmt("", "ptr").(*int), nil) tst.AssertEqual(t, gtvasmt("", "ptr").(*int), nil)