Compare commits

..

105 Commits

Author SHA1 Message Date
06b3b4116e v0.0.558 update gojson (rebase onto go1.23.4)
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m19s
2025-01-10 15:37:04 +01:00
ff821390f7 Apply goext specific patches to gojson
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2025-01-10 15:35:14 +01:00
c8e9c34706 Reset gojson to golang/go|1.23.4 [removes all custom changes] 2025-01-10 11:49:29 +01:00
b7c48cb467 v0.0.556
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m29s
2025-01-09 10:41:00 +01:00
a0a80899f5 v0.0.555
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2025-01-09 10:39:56 +01:00
3543441b96 v0.0.554
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2025-01-09 10:39:31 +01:00
eef12da4e6 v0.0.553
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m24s
2025-01-09 10:29:22 +01:00
d009aafd4e v0.0.552 mathext.ClampOpt
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m50s
2025-01-05 03:40:15 +01:00
f7b4aa48d7 v0.0.551 change exerr.RecursiveMessage() logic: use messages of Wrap() if not empty
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m9s
2025-01-04 02:33:49 +01:00
36b092774d v0.0.550 ArrMapSum
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m12s
2024-12-26 00:23:24 +01:00
a8c6e39ac5 v0.0.549
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m4s
2024-12-10 13:24:06 +01:00
62f2ce9268 v0.0.548
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m56s
2024-12-09 17:39:35 +01:00
49375e90f0 v0.0.547 allow calling ListWithCount with pageSize=0
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m55s
2024-12-08 18:04:04 +01:00
d8cf255c80 v0.0.546 Fix ginext json-parse error when the bufferedReader was read beforehand
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m55s
2024-11-28 12:06:57 +01:00
b520282ba0 v0.0.545
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m6s
2024-11-27 13:21:45 +01:00
27cc9366b5 v0.0.544
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m0s
2024-11-26 15:10:27 +01:00
d9517fe73c v0.0.543
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m7s
2024-11-13 15:03:51 +01:00
8a92a6cc52 v0.0.542
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m9s
2024-11-07 13:13:12 +01:00
9b2028ab54 v0.0.541
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m3s
2024-11-05 14:38:42 +01:00
207fd331d5 v0.0.540 handle ct=nil same as ct=Start
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m25s
2024-10-28 14:35:05 +01:00
54b0d6701d v0.0.539
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m49s
2024-10-27 02:21:35 +02:00
fc2657179b v0.0.538
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m7s
2024-10-27 02:18:45 +02:00
d4894e31fe Merge branch 'cursortoken-paginated'
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-10-27 02:18:25 +02:00
0ddfaf666b v0.0.537 fix goext error print always showing error-type of highest-level error
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m12s
2024-10-26 23:38:13 +02:00
e154137105 Trying out paginated cursortoken variant [UNTESTED]
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m11s
2024-10-25 09:45:42 +02:00
9b9a79b4ad v0.0.536 revert bfcodegen changes
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m24s
2024-10-22 09:57:06 +02:00
5a8d7110e4 v0.0.535
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m52s
2024-10-22 09:41:59 +02:00
d47c84cd47 v0.0.534
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-10-22 09:40:53 +02:00
c571f3f888 v0.0.533 Made String() functions in bfcodegen nil-ptr safe
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m9s
2024-10-22 09:36:40 +02:00
e884ba6b89 v0.0.532
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m53s
2024-10-13 16:33:39 +02:00
1a8e31e5ef v0.0.531
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m58s
2024-10-13 16:10:55 +02:00
eccc0fe9e5 v0.0.530
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m31s
2024-10-09 11:15:26 +02:00
c8dec24a0d v0.0.529 handle PanicWrappedErr in exerr.FromError()
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m13s
2024-10-08 19:22:17 +02:00
b8cb989e54 v0.0.528
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m38s
2024-10-07 17:20:40 +02:00
ec672fbd49 v0.0.527
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-10-07 17:19:30 +02:00
cfb0b53fc7 v0.0.526
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m17s
2024-10-05 23:59:23 +02:00
a7389f44fa v0.0.525 upgrade go1.22 -> go1.23
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m4s
2024-10-05 02:45:20 +02:00
69f0fedd66 v0.0.524
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m22s
2024-10-05 01:41:10 +02:00
335ef4d8e8 v0.0.523 ringbuffer
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m23s
2024-10-05 01:28:46 +02:00
61801ff20d v0.0.522
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m53s
2024-10-05 01:12:00 +02:00
361dca5c85 v0.0.521 ctxext
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m56s
2024-10-05 01:06:36 +02:00
9f85a243e8 v0.0.520
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m7s
2024-10-05 01:02:25 +02:00
dc6cb274ee v0.0.519
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-10-05 00:58:15 +02:00
f6b47792a4 v0.0.518 Improve sq db-listener interface (breaking)
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m11s
2024-10-05 00:45:55 +02:00
295b3ef793 v0.0.517 add constructor funcs for tuples
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m21s
2024-10-02 11:31:34 +02:00
721c176337 v0.0.516
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m25s
2024-09-25 21:43:41 +02:00
ebba6545a3 v0.0.515
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m35s
2024-09-16 17:39:51 +02:00
19c7e22ced v0.0.514 fix mongo filter where the primary sort key is null in db (fallback to secondary)
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-09-16 17:39:18 +02:00
9f883b458f v0.0.513
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m55s
2024-09-16 15:27:32 +02:00
1f456c5134 v0.0.512
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m6s
2024-09-15 21:25:21 +02:00
d7fbef37db v0.0.511
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m10s
2024-09-15 18:22:07 +02:00
a1668b6e5a v0.0.510
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m24s
2024-09-13 18:06:49 +02:00
3a17edfaf0 v0.0.509
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 6m2s
2024-08-26 14:35:49 +02:00
3320a9c19d v0.0.508
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m25s
2024-08-25 17:36:20 +02:00
8dcd8a270a v0.0.507 fix jsonfilter:"-" not working
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 7m7s
2024-08-25 15:41:17 +02:00
03a9b276d8 v0.0.506 allow empty-string as value for enum
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 7m36s
2024-08-22 11:45:02 +02:00
9c8cde384f v0.0.505
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 6m17s
2024-08-08 15:57:05 +02:00
99b000ecf4 v0.0.504
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m53s
2024-08-07 19:44:45 +02:00
a173e30090 v0.0.503
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m52s
2024-08-07 19:37:38 +02:00
a3481a7d2d v0.0.502
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-08-07 19:35:23 +02:00
a8e6f98a89 v0.0.501
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-08-07 19:31:36 +02:00
ab805403b9 v0.0.500
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-08-07 19:30:38 +02:00
1e98d351ce v0.0.499
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m24s
2024-08-07 18:34:22 +02:00
c40bdc8e9e v0.0.498
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m13s
2024-08-07 17:26:35 +02:00
7204562879 v0.0.497
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 2m33s
2024-08-07 17:04:59 +02:00
741611a2e1 v0.0.496 wpdf fixes and wpdf test.go
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m58s
2024-08-07 15:34:06 +02:00
133aeb8374 v0.0.495
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m44s
2024-08-07 14:00:02 +02:00
b78a468632 v0.0.494 add tables to wpdf
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-08-07 13:57:29 +02:00
f1b4480e0f v0.0.493 fix panic in RegisterImage for very short images
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m22s
2024-08-07 09:22:37 +02:00
ffffe4bf24 v0.0.492
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m32s
2024-08-02 16:19:21 +02:00
413bf3c848 v0.0.491 small optimization in Paginate method
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 8m31s
2024-07-31 00:15:09 +02:00
646990b549 v0.0.490 documentation and extra-params in exerr
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m2s
2024-07-27 23:44:18 +02:00
e5818146a8 v0.0.489
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m54s
2024-07-23 14:21:03 +02:00
1310054121 v0.0.488 fix wpdf with 16bpp images
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m9s
2024-07-22 15:16:28 +02:00
49d423915c v0.0.487
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m54s
2024-07-18 17:45:56 +02:00
1962cb3c52 v0.0.486 add ginext -> CorsAllowHeader
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m51s
2024-07-18 17:29:18 +02:00
84f124dd4d v0.0.485
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m44s
2024-07-16 15:22:18 +02:00
ff8e066135 v0.0.484
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m58s
2024-07-16 15:16:56 +02:00
bc5c61e43d v0.0.483
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m37s
2024-07-16 15:08:37 +02:00
6ded615723 v0.0.482 mathext.Percentile
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m46s
2024-07-12 16:33:42 +02:00
abc8af525a v0.0.481
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m50s
2024-07-04 16:24:49 +02:00
19d943361b v0.0.480
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m20s
2024-07-02 11:32:22 +02:00
b464afae01 v0.0.479 AccessStruct
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m36s
2024-07-02 11:29:47 +02:00
56bc5e8285 v0.0.478
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m41s
2024-07-01 17:23:00 +02:00
cb95bb561c v0.0.477 add langext.StrWrap
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m4s
2024-06-29 15:36:39 +02:00
dff8941bd3 v0.0.476 properly close cursor in wmo
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m44s
2024-06-28 18:37:02 +02:00
78e1c33e30 v0.0.475 ArrGroupBy
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m36s
2024-06-16 17:14:21 +02:00
d2f2a0558a v0.0.474 Add ZeroLogger config field to exerr.Init to override used zerolog instance
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m27s
2024-06-14 23:18:58 +02:00
fc4bed4b9f v0.0.473 add ctx to wmo.FilterQuery|Sort|Pagination
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m23s
2024-06-14 17:24:59 +02:00
julian
94a7bf250d v0.0.472 changed gin engine initialization
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m34s
2024-06-14 14:56:41 +02:00
f6121a6961 v0.0.471 Revert "v0.0.470 Add GoextJsonMarshaller interface to call when marshalling json via gojson"
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m50s
2024-06-11 19:39:43 +02:00
7fc73f1e93 v0.0.470 Add GoextJsonMarshaller interface to call when marshalling json via gojson
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m51s
2024-06-11 19:34:48 +02:00
2504ef00a0 v0.0.469
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m26s
2024-06-11 12:10:49 +02:00
fc5803493c added DblPtrIfNotNil
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m31s
2024-06-05 17:53:57 +02:00
a9295bfabf added CoalesceDblPtr
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m37s
2024-06-05 15:10:31 +02:00
12fa53d848 v0.0.466 exerr.Wrap now inherits the Severity of the wrapped error
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m24s
2024-06-03 13:48:30 +02:00
d2bb362135 v0.0.465
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m39s
2024-06-03 09:39:57 +02:00
9dd81f6bd5 v0.0.464 improve ZeroLogErrTraces/ZeroLogAllTraces output for empty-message wrapped exerrs
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m36s
2024-06-01 02:40:48 +02:00
d2c04afcd5 v0.0.463 Fix SubtractYears
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 3m29s
2024-05-29 20:20:01 +02:00
62980e1489 v0.0.462
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m32s
2024-05-23 14:37:05 +02:00
59963adf74 v0.0.461
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m17s
2024-05-20 00:52:49 +02:00
194ea4ace5 v0.0.460
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m16s
2024-05-20 00:38:04 +02:00
73b80a66bc v0.0.459
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m16s
2024-05-20 00:20:31 +02:00
d8b2d01274 v0.0.458 revert 457 and fix ObjectFitImage
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m49s
2024-05-20 00:15:24 +02:00
bfa8457e95 v0.0.457 test
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m38s
2024-05-20 00:07:33 +02:00
106 changed files with 7620 additions and 3401 deletions

View File

@@ -8,7 +8,7 @@ This should not have any heavy dependencies (gin, mongo, etc) and add missing ba
Potentially needs `export GOPRIVATE="gogs.mikescher.com"`
### Packages:
## Packages:
| Name | Maintainer | Description |
|-------------|------------|---------------------------------------------------------------------------------------------------------------|
@@ -20,8 +20,9 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"`
| zipext | Mike | Utility for zip/gzip/tar etc |
| reflectext | Mike | Utility for golang reflection |
| fsext | Mike | Utility for filesystem access |
| ctxext | Mike | Utility for context.Context |
| | | |
| mongoext | Mike | Utility/Helper functions for mongodb |
| mongoext | Mike | Utility/Helper functions for mongodb (kinda abandoned) |
| cursortoken | Mike | MongoDB cursortoken implementation |
| pagination | Mike | Pagination implementation |
| | | |
@@ -42,4 +43,69 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"`
| wmo | Mike | Mongo Wrapper, wraps mongodb with a better interface |
| | | |
| scn | Mike | SimpleCloudNotifier |
| | | |
| | | |
## Usage:
### exerr
- see **mongoext/builder.go** for full info
Short summary:
- A better error package with metadata, listeners, API output and error-traces
- Initialize with `exerr.Init()`
- *Never* return `err` directly, always use exerr.Wrap(err, "...") - add metadata where applicable
- At the end either Print(), Fatal() or Output() your error (print = stdout, fatal = panic, output = JSON API response)
- You can add listeners with exerr.RegisterListener(), e.g. to save the full errors to a database (a minimal sketch follows below)
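A minimal sketch of the flow above (`findUser` and `User` are placeholders; the `exerr` calls follow the builder API documented in exerr/builder.go):
```go
func loadUser(id string) (*User, error) {
	u, err := findUser(id) // hypothetical failing call
	if err != nil {
		// never return err directly: wrap it, attach metadata, then Build()
		return nil, exerr.Wrap(err, "failed to load user").Str("userId", id).Build()
	}
	return u, nil
}
```
Inside a gin handler you would end the chain with `.Output(ctx, g)` instead of `.Build()` to write the error as a JSON API response.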
### wmo
- A typed wrapper around the official mongo-go-driver
- Use `wmo.W[...](...)` to wrap the collections and type-ify them
- The new collections have all the usual methods, but typed
- Also they have List() and Paginate() methods for paginated listings (either with a cursortoken or with page/limit)
- Register additional hooks with `WithDecodeFunc`, `WithUnmarshalHook`, `WithMarshalHook`, `WithModifyingPipeline`, `WithModifyingPipelineFunc`
- List(), Paginate(), etc support filter interfaces
- Rule(s) of thumb:
- filter the results in the filter interface
- sort the results in the sort function of the filter interface
- add joins ($lookup stages) in the `WithModifyingPipelineFunc`/`WithModifyingPipeline`
#### ginext
- A wrapper around gin-gonic/gin
- create the gin engine with `ginext.NewEngine`
- Add routes with `engine.Routes()...`
- `.Use(..)` adds a middleware
- `.Group(..)` adds a group
- `.Get().Handle(..)` adds a handler
- Handlers return values (in contrast to plain gin) - the returned values implement the `ginext.HTTPResponse` interface
- Every handler starts with something like:
```go
func (handler Handler) CommunityMetricsValues(pctx ginext.PreContext) ginext.HTTPResponse {
type communityURI struct {
Version string `uri:"version"`
CommunityID models.CommunityID `uri:"cid"`
}
type body struct {
UserID models.UserID `json:"userID"`
EventID models.EventID `json:"eventID"`
}
var u communityURI
var b body
ctx, gctx, httpErr := pctx.URI(&u).Body(&b).Start() // can have more unmarshallers, like header, form, etc
if httpErr != nil {
return *httpErr
}
defer ctx.Cancel()
// do stuff
}
```
#### sq
- TODO (like mongoext for sqlite/sql databases)

View File

@@ -46,7 +46,7 @@ var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]
var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`))
var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-.]+"|[0-9]+))\s*(//(?P<comm>.*))?.*$`))
var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-.]*"|[0-9]+))\s*(//(?P<comm>.*))?.*$`))
var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))

ctxext/getter.go (new file, 27 lines)

@@ -0,0 +1,27 @@
package ctxext
import "context"
func Value[T any](ctx context.Context, key any) (T, bool) {
v := ctx.Value(key)
if v == nil {
return *new(T), false
}
if tv, ok := v.(T); !ok {
return *new(T), false
} else {
return tv, true
}
}
func ValueOrDefault[T any](ctx context.Context, key any, def T) T {
v := ctx.Value(key)
if v == nil {
return def
}
if tv, ok := v.(T); !ok {
return def
} else {
return tv
}
}
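// Example usage of Value / ValueOrDefault (illustration only, not part of the diff above);
// reqIDKey is a made-up key type.
type reqIDKey struct{}

func ExampleValue() {
	ctx := context.WithValue(context.Background(), reqIDKey{}, "req-1234")

	// typed lookup: returns the value and whether it was present with the requested type
	if rid, ok := Value[string](ctx, reqIDKey{}); ok {
		_ = rid // "req-1234"
	}

	// missing key or mismatching type falls back to the supplied default
	_ = ValueOrDefault(ctx, reqIDKey{}, 42) // 42, because the stored value is a string, not an int
}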

View File

@@ -6,3 +6,13 @@ const (
SortASC SortDirection = "ASC"
SortDESC SortDirection = "DESC"
)
func (sd SortDirection) ToMongo() int {
if sd == SortASC {
return 1
} else if sd == SortDESC {
return -1
} else {
return 0
}
}
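// Illustration only: ToMongo maps a SortDirection onto the 1 / -1 values a mongo
// $sort stage expects (assumes the usual "go.mongodb.org/mongo-driver/bson" import).
func exampleSortStage(field string, dir SortDirection) bson.D {
	return bson.D{{Key: "$sort", Value: bson.D{{Key: field, Value: dir.ToMongo()}}}}
}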

View File

@@ -1,14 +1,15 @@
package cursortoken
import (
"context"
"go.mongodb.org/mongo-driver/mongo"
)
type RawFilter interface {
FilterQuery() mongo.Pipeline
FilterQuery(ctx context.Context) mongo.Pipeline
}
type Filter interface {
FilterQuery() mongo.Pipeline
Pagination() (string, SortDirection, string, SortDirection)
FilterQuery(ctx context.Context) mongo.Pipeline
Pagination(ctx context.Context) (string, SortDirection, string, SortDirection)
}
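// Illustration only: a minimal implementation of the new ctx-aware Filter interface.
// That Pagination returns (primary field, primary direction, secondary field, secondary
// direction) is an assumption based on the cursor-token fields, not spelled out in this diff.
type byCreationDate struct{}

func (byCreationDate) FilterQuery(ctx context.Context) mongo.Pipeline {
	return mongo.Pipeline{} // no additional $match / $lookup stages
}

func (byCreationDate) Pagination(ctx context.Context) (string, SortDirection, string, SortDirection) {
	return "createdAt", SortDESC, "_id", SortDESC
}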

View File

@@ -3,12 +3,18 @@ package cursortoken
import (
"encoding/base32"
"encoding/json"
"errors"
"go.mongodb.org/mongo-driver/bson/primitive"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"strconv"
"strings"
"time"
)
type CursorToken interface {
Token() string
IsStart() bool
IsEnd() bool
}
type Mode string
const (
@@ -24,97 +30,6 @@ type Extra struct {
PageSize *int
}
type CursorToken struct {
Mode Mode
ValuePrimary string
ValueSecondary string
Direction SortDirection
DirectionSecondary SortDirection
PageSize int
Extra Extra
}
type cursorTokenSerialize struct {
ValuePrimary *string `json:"v1,omitempty"`
ValueSecondary *string `json:"v2,omitempty"`
Direction *SortDirection `json:"dir,omitempty"`
DirectionSecondary *SortDirection `json:"dir2,omitempty"`
PageSize *int `json:"size,omitempty"`
ExtraTimestamp *time.Time `json:"ts,omitempty"`
ExtraId *string `json:"id,omitempty"`
ExtraPage *int `json:"pg,omitempty"`
ExtraPageSize *int `json:"sz,omitempty"`
}
func Start() CursorToken {
return CursorToken{
Mode: CTMStart,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func End() CursorToken {
return CursorToken{
Mode: CTMEnd,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func (c *CursorToken) Token() string {
if c.Mode == CTMStart {
return "@start"
}
if c.Mode == CTMEnd {
return "@end"
}
// We kinda manually implement omitempty for the CursorToken here
// because omitempty does not work for time.Time and otherwise we would always
// get weird time values when decoding a token that initially didn't have a Timestamp set
// For this use case we treat Unix=0 as an empty timestamp
sertok := cursorTokenSerialize{}
if c.ValuePrimary != "" {
sertok.ValuePrimary = &c.ValuePrimary
}
if c.ValueSecondary != "" {
sertok.ValueSecondary = &c.ValueSecondary
}
if c.Direction != "" {
sertok.Direction = &c.Direction
}
if c.DirectionSecondary != "" {
sertok.DirectionSecondary = &c.DirectionSecondary
}
if c.PageSize != 0 {
sertok.PageSize = &c.PageSize
}
sertok.ExtraTimestamp = c.Extra.Timestamp
sertok.ExtraId = c.Extra.Id
sertok.ExtraPage = c.Extra.Page
sertok.ExtraPageSize = c.Extra.PageSize
body, err := json.Marshal(sertok)
if err != nil {
panic(err)
}
return "tok_" + base32.StdEncoding.EncodeToString(body)
}
func Decode(tok string) (CursorToken, error) {
if tok == "" {
return Start(), nil
@@ -125,60 +40,56 @@ func Decode(tok string) (CursorToken, error) {
if strings.ToLower(tok) == "@end" {
return End(), nil
}
if !strings.HasPrefix(tok, "tok_") {
return CursorToken{}, errors.New("could not decode token, missing prefix")
if strings.ToLower(tok) == "$end" {
return PageEnd(), nil
}
if strings.HasPrefix(tok, "$") && len(tok) > 1 {
n, err := strconv.ParseInt(tok[1:], 10, 64)
if err != nil {
return nil, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).WithType(exerr.TypeCursorTokenDecode).Build()
}
return Page(int(n)), nil
}
body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):])
if err != nil {
return CursorToken{}, err
}
if strings.HasPrefix(tok, "tok_") {
var tokenDeserialize cursorTokenSerialize
err = json.Unmarshal(body, &tokenDeserialize)
if err != nil {
return CursorToken{}, err
}
body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):])
if err != nil {
return nil, err
}
token := CursorToken{Mode: CTMNormal}
var tokenDeserialize cursorTokenKeySortSerialize
err = json.Unmarshal(body, &tokenDeserialize)
if err != nil {
return nil, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).WithType(exerr.TypeCursorTokenDecode).Build()
}
if tokenDeserialize.ValuePrimary != nil {
token.ValuePrimary = *tokenDeserialize.ValuePrimary
}
if tokenDeserialize.ValueSecondary != nil {
token.ValueSecondary = *tokenDeserialize.ValueSecondary
}
if tokenDeserialize.Direction != nil {
token.Direction = *tokenDeserialize.Direction
}
if tokenDeserialize.DirectionSecondary != nil {
token.DirectionSecondary = *tokenDeserialize.DirectionSecondary
}
if tokenDeserialize.PageSize != nil {
token.PageSize = *tokenDeserialize.PageSize
}
token := CTKeySort{Mode: CTMNormal}
token.Extra.Timestamp = tokenDeserialize.ExtraTimestamp
token.Extra.Id = tokenDeserialize.ExtraId
token.Extra.Page = tokenDeserialize.ExtraPage
token.Extra.PageSize = tokenDeserialize.ExtraPageSize
if tokenDeserialize.ValuePrimary != nil {
token.ValuePrimary = *tokenDeserialize.ValuePrimary
}
if tokenDeserialize.ValueSecondary != nil {
token.ValueSecondary = *tokenDeserialize.ValueSecondary
}
if tokenDeserialize.Direction != nil {
token.Direction = *tokenDeserialize.Direction
}
if tokenDeserialize.DirectionSecondary != nil {
token.DirectionSecondary = *tokenDeserialize.DirectionSecondary
}
if tokenDeserialize.PageSize != nil {
token.PageSize = *tokenDeserialize.PageSize
}
return token, nil
}
token.Extra.Timestamp = tokenDeserialize.ExtraTimestamp
token.Extra.Id = tokenDeserialize.ExtraId
token.Extra.Page = tokenDeserialize.ExtraPage
token.Extra.PageSize = tokenDeserialize.ExtraPageSize
return token, nil
func (c *CursorToken) ValuePrimaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
}
}
func (c *CursorToken) ValueSecondaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
return nil, exerr.New(exerr.TypeCursorTokenDecode, "could not decode token, missing/unknown prefix").Str("token", tok).Build()
}
}

cursortoken/tokenKeySort.go (new file, 136 lines)

@@ -0,0 +1,136 @@
package cursortoken
import (
"encoding/base32"
"encoding/json"
"go.mongodb.org/mongo-driver/bson/primitive"
"time"
)
type CTKeySort struct {
Mode Mode
ValuePrimary string
ValueSecondary string
Direction SortDirection
DirectionSecondary SortDirection
PageSize int
Extra Extra
}
type cursorTokenKeySortSerialize struct {
ValuePrimary *string `json:"v1,omitempty"`
ValueSecondary *string `json:"v2,omitempty"`
Direction *SortDirection `json:"dir,omitempty"`
DirectionSecondary *SortDirection `json:"dir2,omitempty"`
PageSize *int `json:"size,omitempty"`
ExtraTimestamp *time.Time `json:"ts,omitempty"`
ExtraId *string `json:"id,omitempty"`
ExtraPage *int `json:"pg,omitempty"`
ExtraPageSize *int `json:"sz,omitempty"`
}
func NewKeySortToken(valuePrimary string, valueSecondary string, direction SortDirection, directionSecondary SortDirection, pageSize int, extra Extra) CursorToken {
return CTKeySort{
Mode: CTMNormal,
ValuePrimary: valuePrimary,
ValueSecondary: valueSecondary,
Direction: direction,
DirectionSecondary: directionSecondary,
PageSize: pageSize,
Extra: extra,
}
}
func Start() CursorToken {
return CTKeySort{
Mode: CTMStart,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func End() CursorToken {
return CTKeySort{
Mode: CTMEnd,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func (c CTKeySort) Token() string {
if c.Mode == CTMStart {
return "@start"
}
if c.Mode == CTMEnd {
return "@end"
}
// We kinda manually implement omitempty for the CursorToken here
// because omitempty does not work for time.Time and otherwise we would always
// get weird time values when decoding a token that initially didn't have a Timestamp set
// For this use case we treat Unix=0 as an empty timestamp
sertok := cursorTokenKeySortSerialize{}
if c.ValuePrimary != "" {
sertok.ValuePrimary = &c.ValuePrimary
}
if c.ValueSecondary != "" {
sertok.ValueSecondary = &c.ValueSecondary
}
if c.Direction != "" {
sertok.Direction = &c.Direction
}
if c.DirectionSecondary != "" {
sertok.DirectionSecondary = &c.DirectionSecondary
}
if c.PageSize != 0 {
sertok.PageSize = &c.PageSize
}
sertok.ExtraTimestamp = c.Extra.Timestamp
sertok.ExtraId = c.Extra.Id
sertok.ExtraPage = c.Extra.Page
sertok.ExtraPageSize = c.Extra.PageSize
body, err := json.Marshal(sertok)
if err != nil {
panic(err)
}
return "tok_" + base32.StdEncoding.EncodeToString(body)
}
func (c CTKeySort) IsEnd() bool {
return c.Mode == CTMEnd
}
func (c CTKeySort) IsStart() bool {
return c.Mode == CTMStart
}
func (c CTKeySort) valuePrimaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
}
}
func (c CTKeySort) valueSecondaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
}
}
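// Illustration only: round-tripping a key-sort token through Token() and Decode().
func exampleKeySortRoundTrip() {
	tok := NewKeySortToken("2024-10-01T00:00:00Z", "507f1f77bcf86cd799439011", SortDESC, SortDESC, 50, Extra{})

	s := tok.Token() // "tok_" + base32-encoded JSON of the non-empty fields

	back, err := Decode(s)
	if err != nil {
		panic(err)
	}
	_ = back // a CTKeySort with Mode == CTMNormal and the same values as tok
}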

View File

@@ -0,0 +1,41 @@
package cursortoken
import "strconv"
type CTPaginated struct {
Mode Mode
Page int
}
func Page(p int) CursorToken {
return CTPaginated{
Mode: CTMNormal,
Page: p,
}
}
func PageEnd() CursorToken {
return CTPaginated{
Mode: CTMEnd,
Page: 0,
}
}
func (c CTPaginated) Token() string {
if c.Mode == CTMStart {
return "$1"
}
if c.Mode == CTMEnd {
return "$end"
}
return "$" + strconv.Itoa(c.Page)
}
func (c CTPaginated) IsEnd() bool {
return c.Mode == CTMEnd
}
func (c CTPaginated) IsStart() bool {
return c.Mode == CTMStart || c.Page == 1
}
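// Illustration only: the paginated variant encodes the token as a plain "$<page>" string.
func examplePaginatedTokens() {
	_ = Page(3).Token()   // "$3"
	_ = PageEnd().Token() // "$end"

	tok, err := Decode("$3") // yields Page(3), i.e. CTPaginated{Mode: CTMNormal, Page: 3}
	if err != nil {
		panic(err)
	}
	_ = tok.IsStart() // false - only page 1 (or an explicit "@start"/"" token) counts as start
}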

View File

@@ -115,6 +115,9 @@ func (b *bufferedReadCloser) BufferedAll() ([]byte, error) {
return nil, err
}
}
if err := b.Reset(); err != nil {
return nil, err
}
return b.buffer, nil
case modeSourceFinished:
@@ -131,10 +134,22 @@ func (b *bufferedReadCloser) BufferedAll() ([]byte, error) {
}
}
// Reset resets the read position back to the beginning of the buffer.
// If the original source is partially read, we will finish reading it and fill our buffer
func (b *bufferedReadCloser) Reset() error {
switch b.mode {
case modeSourceReading:
fallthrough
if b.off == 0 {
return nil // nobody has read anything yet
}
err := b.Close()
if err != nil {
return err
}
b.mode = modeBufferReading
b.off = 0
return nil
case modeSourceFinished:
err := b.Close()
if err != nil {

View File

@@ -3,6 +3,7 @@ package dataext
import (
"encoding/json"
"errors"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type JsonOpt[T any] struct {
@@ -10,6 +11,14 @@ type JsonOpt[T any] struct {
value T
}
func NewJsonOpt[T any](v T) JsonOpt[T] {
return JsonOpt[T]{isSet: true, value: v}
}
func EmptyJsonOpt[T any]() JsonOpt[T] {
return JsonOpt[T]{isSet: false}
}
// MarshalJSON returns m as the JSON encoding of m.
func (m JsonOpt[T]) MarshalJSON() ([]byte, error) {
if !m.isSet {
@@ -51,9 +60,24 @@ func (m JsonOpt[T]) ValueOrNil() *T {
return &m.value
}
func (m JsonOpt[T]) ValueDblPtrOrNil() **T {
if !m.isSet {
return nil
}
return langext.DblPtr(m.value)
}
func (m JsonOpt[T]) MustValue() T {
if !m.isSet {
panic("value not set")
}
return m.value
}
func (m JsonOpt[T]) IfSet(fn func(v T)) bool {
if !m.isSet {
return false
}
fn(m.value)
return true
}

dataext/ringBuffer.go (new file, 144 lines)

@@ -0,0 +1,144 @@
package dataext
import "iter"
type RingBuffer[T any] struct {
items    []T // backing slice holding the buffered items
capacity int // max number of items the buffer can hold
size     int // how many items are in the buffer
head     int // index of the next slot to write (the newest item sits directly before it)
}
func NewRingBuffer[T any](capacity int) *RingBuffer[T] {
return &RingBuffer[T]{
items: make([]T, capacity),
capacity: capacity,
size: 0,
head: 0,
}
}
func (rb *RingBuffer[T]) Push(item T) {
if rb.size < rb.capacity {
rb.size++
}
rb.items[rb.head] = item
rb.head = (rb.head + 1) % rb.capacity
}
func (rb *RingBuffer[T]) PushPop(item T) *T {
if rb.size < rb.capacity {
rb.size++
rb.items[rb.head] = item
rb.head = (rb.head + 1) % rb.capacity
return nil
} else {
prev := rb.items[rb.head]
rb.items[rb.head] = item
rb.head = (rb.head + 1) % rb.capacity
return &prev
}
}
func (rb *RingBuffer[T]) Peek() (T, bool) {
if rb.size == 0 {
return *new(T), false
}
return rb.items[(rb.head-1+rb.capacity)%rb.capacity], true
}
func (rb *RingBuffer[T]) Items() []T {
if rb.size < rb.capacity {
return rb.items[:rb.size]
}
return append(rb.items[rb.head:], rb.items[:rb.head]...)
}
func (rb *RingBuffer[T]) Size() int {
return rb.size
}
func (rb *RingBuffer[T]) Capacity() int {
return rb.capacity
}
func (rb *RingBuffer[T]) Clear() {
rb.size = 0
rb.head = 0
}
func (rb *RingBuffer[T]) IsFull() bool {
return rb.size == rb.capacity
}
func (rb *RingBuffer[T]) At(i int) T {
if i < 0 || i >= rb.size {
panic("Index out of bounds")
}
if rb.size < rb.capacity {
return rb.items[i]
}
return rb.items[(rb.head+i)%rb.capacity]
}
func (rb *RingBuffer[T]) Get(i int) (T, bool) {
if i < 0 || i >= rb.size {
return *new(T), false
}
if rb.size < rb.capacity {
return rb.items[i], true
}
return rb.items[(rb.head+i)%rb.capacity], true
}
func (rb *RingBuffer[T]) Iter() iter.Seq[T] {
return func(yield func(T) bool) {
for i := 0; i < rb.size; i++ {
if !yield(rb.At(i)) {
return
}
}
}
}
func (rb *RingBuffer[T]) Iter2() iter.Seq2[int, T] {
return func(yield func(int, T) bool) {
for i := 0; i < rb.size; i++ {
if !yield(i, rb.At(i)) {
return
}
}
}
}
func (rb *RingBuffer[T]) Remove(fnEqual func(v T) bool) int {
// Mike [2024-11-13]: I *really* tried to write an in-place algorithm to remove elements
// But after careful consideration, I left that as an exercise for future readers
// It is, surprisingly, non-trivial, especially because the head-ptr must be weirdly updated
// And our At() method does not work correctly with {head<>0 && size<capacity}
dc := 0
b := make([]T, rb.capacity)
bsize := 0
for i := 0; i < rb.size; i++ {
comp := rb.At(i)
if fnEqual(comp) {
dc++
} else {
b[bsize] = comp
bsize++
}
}
if dc == 0 {
return 0
}
rb.items = b
rb.size = bsize
rb.head = bsize % rb.capacity
return dc
}
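// Illustration only: once the buffer is full, Push overwrites the oldest entry
// and PushPop additionally returns the evicted value.
func exampleRingBuffer() {
	rb := NewRingBuffer[string](3)
	rb.Push("a")
	rb.Push("b")
	rb.Push("c")

	evicted := rb.PushPop("d") // buffer is full, so "a" gets evicted
	_ = *evicted               // "a"
	_ = rb.Items()             // ["b", "c", "d"], oldest first
}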

dataext/ringBuffer_test.go (new file, 447 lines)

@@ -0,0 +1,447 @@
package dataext
import "testing"
func TestRingBufferPushAddsItem(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
if rb.Size() != 1 {
t.Errorf("Expected size 1, got %d", rb.Size())
}
if item, _ := rb.Peek(); item != 1 {
t.Errorf("Expected item 1, got %d", item)
}
}
func TestRingBufferPushPopReturnsOldestItem(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
if item := rb.PushPop(4); item == nil || *item != 1 {
t.Errorf("Expected item 1, got %v", item)
}
}
func TestRingBufferPeekReturnsLastPushedItem(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
if item, _ := rb.Peek(); item != 2 {
t.Errorf("Expected item 2, got %d", item)
}
}
func TestRingBufferOverflow1(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1) // overridden
rb.Push(2) // overridden
rb.Push(3)
rb.Push(9)
rb.Push(4)
rb.Push(5)
rb.Push(7)
if rb.Size() != 5 {
t.Errorf("Expected size 4, got %d", rb.Size())
}
expected := []int{3, 9, 4, 5, 7}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferItemsReturnsAllItems(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
items := rb.Items()
expected := []int{1, 2, 3}
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferClearEmptiesBuffer(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Clear()
if rb.Size() != 0 {
t.Errorf("Expected size 0, got %d", rb.Size())
}
}
func TestRingBufferIsFullReturnsTrueWhenFull(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
if !rb.IsFull() {
t.Errorf("Expected buffer to be full")
}
}
func TestRingBufferAtReturnsCorrectItem(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
if item := rb.At(1); item != 2 {
t.Errorf("Expected item 2, got %d", item)
}
}
func TestRingBufferGetReturnsCorrectItem(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
if item, ok := rb.Get(1); !ok || item != 2 {
t.Errorf("Expected item 2, got %d", item)
}
}
func TestRingBufferRemoveDeletesMatchingItems(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1)
rb.Push(2)
rb.Push(3)
rb.Push(2)
rb.Push(4)
removed := rb.Remove(func(v int) bool { return v == 2 })
if removed != 2 {
t.Errorf("Expected 2 items removed, got %d", removed)
}
if rb.Size() != 3 {
t.Errorf("Expected size 3, got %d", rb.Size())
}
expected := []int{1, 3, 4}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveDeletesMatchingItems2(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1)
rb.Push(2)
rb.Push(3)
rb.Push(2)
rb.Push(4)
removed := rb.Remove(func(v int) bool { return v == 3 })
if removed != 1 {
t.Errorf("Expected 2 items removed, got %d", removed)
}
if rb.Size() != 4 {
t.Errorf("Expected size 3, got %d", rb.Size())
}
expected := []int{1, 2, 2, 4}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveDeletesMatchingItems3(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1)
rb.Push(2)
rb.Push(3)
rb.Push(9)
rb.Push(4)
removed := rb.Remove(func(v int) bool { return v == 3 })
if removed != 1 {
t.Errorf("Expected 2 items removed, got %d", removed)
}
if rb.Size() != 4 {
t.Errorf("Expected size 3, got %d", rb.Size())
}
expected := []int{1, 2, 9, 4}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveDeletesMatchingItems4(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1) // overridden
rb.Push(2) // overridden
rb.Push(3)
rb.Push(9)
rb.Push(4)
rb.Push(5)
rb.Push(7)
removed := rb.Remove(func(v int) bool { return v == 7 })
if removed != 1 {
t.Errorf("Expected 1 items removed, got %d", removed)
}
if rb.Size() != 4 {
t.Errorf("Expected size 4, got %d", rb.Size())
}
expected := []int{3, 9, 4, 5}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveDeletesMatchingItems5(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1) // overridden
rb.Push(2) // overridden
rb.Push(3)
rb.Push(9)
rb.Push(4)
rb.Push(5)
rb.Push(7)
removed := rb.Remove(func(v int) bool { return v == 3 })
if removed != 1 {
t.Errorf("Expected 1 items removed, got %d", removed)
}
if rb.Size() != 4 {
t.Errorf("Expected size 4, got %d", rb.Size())
}
expected := []int{9, 4, 5, 7}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveDeletesMatchingItems6(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1) // overridden
rb.Push(2) // overridden
rb.Push(3)
rb.Push(9)
rb.Push(4)
rb.Push(5)
rb.Push(7)
removed := rb.Remove(func(v int) bool { return v == 1 })
if removed != 0 {
t.Errorf("Expected 0 items removed, got %d", removed)
}
if rb.Size() != 5 {
t.Errorf("Expected size 5, got %d", rb.Size())
}
expected := []int{3, 9, 4, 5, 7}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
if !rb.IsFull() {
t.Errorf("Expected buffer to not be full")
}
}
func TestRingBufferRemoveDeletesMatchingItems7(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1) // overridden
rb.Push(2) // overridden
rb.Push(3)
rb.Push(9)
rb.Push(4)
rb.Push(5)
rb.Push(7)
removed := rb.Remove(func(v int) bool { return v == 9 })
if removed != 1 {
t.Errorf("Expected 1 items removed, got %d", removed)
}
if rb.Size() != 4 {
t.Errorf("Expected size 4, got %d", rb.Size())
}
expected := []int{3, 4, 5, 7}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
if rb.IsFull() {
t.Errorf("Expected buffer to not be full")
}
}
func TestRingBufferAddItemsToFullRingBuffer(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
rb.Push(4)
if rb.Size() != 3 {
t.Errorf("Expected size 3, got %d", rb.Size())
}
expected := []int{2, 3, 4}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferAddItemsToNonFullRingBuffer(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
if rb.Size() != 2 {
t.Errorf("Expected size 2, got %d", rb.Size())
}
expected := []int{1, 2}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveItemsFromNonFullRingBuffer(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
removed := rb.Remove(func(v int) bool { return v == 1 })
if removed != 1 {
t.Errorf("Expected 1 item removed, got %d", removed)
}
if rb.Size() != 1 {
t.Errorf("Expected size 1, got %d", rb.Size())
}
expected := []int{2}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveItemsFromFullRingBuffer(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
removed := rb.Remove(func(v int) bool { return v == 2 })
if removed != 1 {
t.Errorf("Expected 1 item removed, got %d", removed)
}
if rb.Size() != 2 {
t.Errorf("Expected size 2, got %d", rb.Size())
}
expected := []int{1, 3}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveMultipleItemsFromRingBuffer(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1)
rb.Push(2)
rb.Push(3)
rb.Push(2)
rb.Push(4)
removed := rb.Remove(func(v int) bool { return v == 2 })
if removed != 2 {
t.Errorf("Expected 2 items removed, got %d", removed)
}
if rb.Size() != 3 {
t.Errorf("Expected size 3, got %d", rb.Size())
}
expected := []int{1, 3, 4}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveAllItemsFromRingBuffer(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
removed := rb.Remove(func(v int) bool { return true })
if removed != 3 {
t.Errorf("Expected 3 items removed, got %d", removed)
}
if rb.Size() != 0 {
t.Errorf("Expected size 0, got %d", rb.Size())
}
}
func TestRingBufferRemoveNoItemsFromRingBuffer(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
removed := rb.Remove(func(v int) bool { return false })
if removed != 0 {
t.Errorf("Expected 0 items removed, got %d", removed)
}
if rb.Size() != 3 {
t.Errorf("Expected size 3, got %d", rb.Size())
}
}
func TestRingBufferIteratesOverAllItems(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
expected := []int{1, 2, 3}
i := 0
for item := range rb.Iter() {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
i++
}
if i != len(expected) {
t.Errorf("Expected to iterate over %d items, but iterated over %d", len(expected), i)
}
}
func TestRingBufferIter2IteratesOverAllItemsWithIndices(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
expected := []int{1, 2, 3}
i := 0
for index, item := range rb.Iter2() {
if index != i {
t.Errorf("Expected index %d, got %d", i, index)
}
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
i++
}
if i != len(expected) {
t.Errorf("Expected to iterate over %d items, but iterated over %d", len(expected), i)
}
}

View File

@@ -7,6 +7,10 @@ type SyncMap[TKey comparable, TData any] struct {
lock sync.Mutex
}
func NewSyncMap[TKey comparable, TData any]() *SyncMap[TKey, TData] {
return &SyncMap[TKey, TData]{data: make(map[TKey]TData), lock: sync.Mutex{}}
}
func (s *SyncMap[TKey, TData]) Set(key TKey, data TData) {
s.lock.Lock()
defer s.lock.Unlock()

dataext/syncRingSet.go (new file, 143 lines)

@@ -0,0 +1,143 @@
package dataext
import "sync"
type SyncRingSet[TData comparable] struct {
data map[TData]bool
lock sync.Mutex
ring *RingBuffer[TData]
}
func NewSyncRingSet[TData comparable](capacity int) *SyncRingSet[TData] {
return &SyncRingSet[TData]{
data: make(map[TData]bool, capacity+1),
lock: sync.Mutex{},
ring: NewRingBuffer[TData](capacity),
}
}
// Add adds `value` to the set
// returns true if the value was actually inserted (value did not exist beforehand)
// returns false if the value already existed
func (s *SyncRingSet[TData]) Add(value TData) bool {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
_, existsInPreState := s.data[value]
if existsInPreState {
return false
}
prev := s.ring.PushPop(value)
s.data[value] = true
if prev != nil {
delete(s.data, *prev)
}
return true
}
func (s *SyncRingSet[TData]) AddAll(values []TData) {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
for _, value := range values {
_, existsInPreState := s.data[value]
if existsInPreState {
continue
}
prev := s.ring.PushPop(value)
s.data[value] = true
if prev != nil {
delete(s.data, *prev)
}
}
}
func (s *SyncRingSet[TData]) Remove(value TData) bool {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
_, existsInPreState := s.data[value]
if !existsInPreState {
return false
}
delete(s.data, value)
s.ring.Remove(func(v TData) bool { return value == v })
return true
}
func (s *SyncRingSet[TData]) RemoveAll(values []TData) {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
for _, value := range values {
delete(s.data, value)
s.ring.Remove(func(v TData) bool { return value == v })
}
}
func (s *SyncRingSet[TData]) Contains(value TData) bool {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
_, ok := s.data[value]
return ok
}
func (s *SyncRingSet[TData]) Get() []TData {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
r := make([]TData, 0, len(s.data))
for k := range s.data {
r = append(r, k)
}
return r
}
// AddIfNotContains
// returns true if the value was actually added (value did not exist beforehand)
// returns false if the value already existed
func (s *SyncRingSet[TData]) AddIfNotContains(key TData) bool {
return s.Add(key)
}
// RemoveIfContains
// returns true if the value was actually removed (value did exist beforehand)
// returns false if the value did not exist in the set
func (s *SyncRingSet[TData]) RemoveIfContains(key TData) bool {
return s.Remove(key)
}
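// Illustration only: SyncRingSet behaves like a thread-safe "set of the last N values" -
// Add reports whether the value was new, and once more than `capacity` distinct values
// have been added, the oldest entries silently drop out of the set again.
func exampleSyncRingSet() {
	seen := NewSyncRingSet[string](2)

	_ = seen.Add("a") // true  (new)
	_ = seen.Add("b") // true  (new)
	_ = seen.Add("a") // false (already contained)

	_ = seen.Add("c")      // true; the ring is full, so the oldest value "a" is evicted
	_ = seen.Contains("a") // false - dropped together with its ring-buffer slot
	_ = seen.Contains("b") // true
}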

View File

@@ -7,8 +7,12 @@ type SyncSet[TData comparable] struct {
lock sync.Mutex
}
func NewSyncSet[TData comparable]() *SyncSet[TData] {
return &SyncSet[TData]{data: make(map[TData]bool), lock: sync.Mutex{}}
}
// Add adds `value` to the set
// returns true if the value was actually inserted
// returns true if the value was actually inserted (value did not exist beforehand)
// returns false if the value already existed
func (s *SyncSet[TData]) Add(value TData) bool {
s.lock.Lock()
@@ -19,9 +23,12 @@ func (s *SyncSet[TData]) Add(value TData) bool {
}
_, existsInPreState := s.data[value]
s.data[value] = true
if existsInPreState {
return false
}
return !existsInPreState
s.data[value] = true
return true
}
func (s *SyncSet[TData]) AddAll(values []TData) {
@@ -37,6 +44,36 @@ func (s *SyncSet[TData]) AddAll(values []TData) {
}
}
func (s *SyncSet[TData]) Remove(value TData) bool {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
_, existsInPreState := s.data[value]
if !existsInPreState {
return false
}
delete(s.data, value)
return true
}
func (s *SyncSet[TData]) RemoveAll(values []TData) {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
for _, value := range values {
delete(s.data, value)
}
}
func (s *SyncSet[TData]) Contains(value TData) bool {
s.lock.Lock()
defer s.lock.Unlock()
@@ -66,3 +103,17 @@ func (s *SyncSet[TData]) Get() []TData {
return r
}
// AddIfNotContains
// returns true if the value was actually added (value did not exist beforehand)
// returns false if the value already existed
func (s *SyncSet[TData]) AddIfNotContains(key TData) bool {
return s.Add(key)
}
// RemoveIfContains
// returns true if the value was actually removed (value did exist beforehand)
// returns false if the value did not exist in the set
func (s *SyncSet[TData]) RemoveIfContains(key TData) bool {
return s.Remove(key)
}

View File

@@ -19,6 +19,14 @@ func (s Single[T1]) TupleValues() []any {
return []any{s.V1}
}
func NewSingle[T1 any](v1 T1) Single[T1] {
return Single[T1]{V1: v1}
}
func NewTuple1[T1 any](v1 T1) Single[T1] {
return Single[T1]{V1: v1}
}
// ----------------------------------------------------------------------------
type Tuple[T1 any, T2 any] struct {
@@ -34,6 +42,14 @@ func (t Tuple[T1, T2]) TupleValues() []any {
return []any{t.V1, t.V2}
}
func NewTuple[T1 any, T2 any](v1 T1, v2 T2) Tuple[T1, T2] {
return Tuple[T1, T2]{V1: v1, V2: v2}
}
func NewTuple2[T1 any, T2 any](v1 T1, v2 T2) Tuple[T1, T2] {
return Tuple[T1, T2]{V1: v1, V2: v2}
}
// ----------------------------------------------------------------------------
type Triple[T1 any, T2 any, T3 any] struct {
@@ -50,6 +66,14 @@ func (t Triple[T1, T2, T3]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3}
}
func NewTriple[T1 any, T2 any, T3 any](v1 T1, v2 T2, v3 T3) Triple[T1, T2, T3] {
return Triple[T1, T2, T3]{V1: v1, V2: v2, V3: v3}
}
func NewTuple3[T1 any, T2 any, T3 any](v1 T1, v2 T2, v3 T3) Triple[T1, T2, T3] {
return Triple[T1, T2, T3]{V1: v1, V2: v2, V3: v3}
}
// ----------------------------------------------------------------------------
type Quadruple[T1 any, T2 any, T3 any, T4 any] struct {
@@ -67,6 +91,14 @@ func (t Quadruple[T1, T2, T3, T4]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4}
}
func NewQuadruple[T1 any, T2 any, T3 any, T4 any](v1 T1, v2 T2, v3 T3, v4 T4) Quadruple[T1, T2, T3, T4] {
return Quadruple[T1, T2, T3, T4]{V1: v1, V2: v2, V3: v3, V4: v4}
}
func NewTuple4[T1 any, T2 any, T3 any, T4 any](v1 T1, v2 T2, v3 T3, v4 T4) Quadruple[T1, T2, T3, T4] {
return Quadruple[T1, T2, T3, T4]{V1: v1, V2: v2, V3: v3, V4: v4}
}
// ----------------------------------------------------------------------------
type Quintuple[T1 any, T2 any, T3 any, T4 any, T5 any] struct {
@@ -86,6 +118,14 @@ func (t Quintuple[T1, T2, T3, T4, T5]) TupleValues() []any {
}
func NewQuintuple[T1 any, T2 any, T3 any, T4 any, T5 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5) Quintuple[T1, T2, T3, T4, T5] {
return Quintuple[T1, T2, T3, T4, T5]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5}
}
func NewTuple5[T1 any, T2 any, T3 any, T4 any, T5 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5) Quintuple[T1, T2, T3, T4, T5] {
return Quintuple[T1, T2, T3, T4, T5]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5}
}
// ----------------------------------------------------------------------------
type Sextuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any] struct {
@@ -106,6 +146,14 @@ func (t Sextuple[T1, T2, T3, T4, T5, T6]) TupleValues() []any {
}
func NewSextuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6) Sextuple[T1, T2, T3, T4, T5, T6] {
return Sextuple[T1, T2, T3, T4, T5, T6]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6}
}
func NewTuple6[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6) Sextuple[T1, T2, T3, T4, T5, T6] {
return Sextuple[T1, T2, T3, T4, T5, T6]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6}
}
// ----------------------------------------------------------------------------
type Septuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any] struct {
@@ -126,6 +174,14 @@ func (t Septuple[T1, T2, T3, T4, T5, T6, T7]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7}
}
func NewSeptuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7) Septuple[T1, T2, T3, T4, T5, T6, T7] {
return Septuple[T1, T2, T3, T4, T5, T6, T7]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7}
}
func NewTuple7[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7) Septuple[T1, T2, T3, T4, T5, T6, T7] {
return Septuple[T1, T2, T3, T4, T5, T6, T7]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7}
}
// ----------------------------------------------------------------------------
type Octuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any] struct {
@@ -147,6 +203,14 @@ func (t Octuple[T1, T2, T3, T4, T5, T6, T7, T8]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8}
}
func NewOctuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8) Octuple[T1, T2, T3, T4, T5, T6, T7, T8] {
return Octuple[T1, T2, T3, T4, T5, T6, T7, T8]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8}
}
func NewTuple8[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8) Octuple[T1, T2, T3, T4, T5, T6, T7, T8] {
return Octuple[T1, T2, T3, T4, T5, T6, T7, T8]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8}
}
// ----------------------------------------------------------------------------
type Nonuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any] struct {
@@ -168,3 +232,10 @@ func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleLength() int {
func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8, t.V9}
}
func NewNonuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8, v9 T9) Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9] {
return Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8, V9: v9}
}
func NewTuple9[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8, v9 T9) Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9] {
return Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8, V9: v9}
}
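// Illustration only: the NewTupleN constructors are arity-named aliases for the
// named constructors (NewTuple2 == NewTuple, NewTuple3 == NewTriple, ...).
func exampleTuples() {
	p := NewTuple2("answer", 42) // Tuple[string, int]
	_ = p.V1                     // "answer"
	_ = p.V2                     // 42
	_ = p.TupleValues()          // []any{"answer", 42}
}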

View File

@@ -30,6 +30,10 @@ import (
// If possible add metadata to the error (eg the id that was not found, ...), the methods are the same as in zerolog
// return nil, exerror.Wrap(err, "do something failed").Str("someid", id).Int("count", in.Count).Build()
//
// You can also add extra-data to an error with Extra(..)
// in contrast to metadata, extra-data is always printed in the resulting error and is intended more for additional (programmatically readable) data alongside the errortype
// (metadata is more internal debug info/help)
//
// You can change the errortype with `.User()` and `.System()` (User-errors are 400 and System-errors 500)
// You can also manually set the statuscode with `.WithStatuscode(http.StatusNotFound)`
// You can set the type with `WithType(..)`
@@ -55,18 +59,6 @@ import (
// => Wrap/New + Fatal
//
var stackSkipLogger zerolog.Logger
func init() {
cw := zerolog.ConsoleWriter{
Out: os.Stdout,
TimeFormat: "2006-01-02 15:04:05 Z07:00",
}
multi := zerolog.MultiLevelWriter(cw)
stackSkipLogger = zerolog.New(multi).With().Timestamp().CallerWithSkipFrameCount(4).Logger()
}
type Builder struct {
wrappedErr error
errorData *ExErr
@@ -88,12 +80,14 @@ func Wrap(err error, msg string) *Builder {
return &Builder{errorData: newExErr(CatSystem, TypeInternal, msg)} // prevent NPE if we call Wrap with err==nil
}
v := FromError(err)
if !pkgconfig.RecursiveErrors {
v := FromError(err)
v.Message = msg
return &Builder{wrappedErr: err, errorData: v}
} else {
return &Builder{wrappedErr: err, errorData: wrapExErr(v, msg, CatWrap, 1)}
}
return &Builder{wrappedErr: err, errorData: wrapExErr(FromError(err), msg, CatWrap, 1)}
}
// ----------------------------------------------------------------------------
@@ -380,29 +374,6 @@ func (b *Builder) CtxData(method Method, ctx context.Context) *Builder {
return b
}
func formatHeader(header map[string][]string) string {
ml := 1
for k, _ := range header {
if len(k) > ml {
ml = len(k)
}
}
r := ""
for k, v := range header {
if r != "" {
r += "\n"
}
for _, hval := range v {
value := hval
value = strings.ReplaceAll(value, "\n", "\\n")
value = strings.ReplaceAll(value, "\r", "\\r")
value = strings.ReplaceAll(value, "\t", "\\t")
r += langext.StrPadRight(k, " ", ml) + " := " + value
}
}
return r
}
func extractHeader(header map[string][]string) []string {
r := make([]string, 0, len(header))
for k, v := range header {
@@ -419,6 +390,16 @@ func extractHeader(header map[string][]string) []string {
// ----------------------------------------------------------------------------
// Extra adds additional data to the error
// unlike the other metadata (Id(), Str(), etc.)
// this data is public and will be printed/output
func (b *Builder) Extra(key string, val any) *Builder {
b.errorData.Extra[key] = val
return b
}
// ----------------------------------------------------------------------------
// Build creates a new error, ready to pass up the stack
// If the error is not SevWarn or SevInfo, it is also logged (in short form, without stacktrace) to stdout
// Can be globally configured with ZeroLogErrTraces and ZeroLogAllTraces
@@ -435,9 +416,9 @@ func (b *Builder) Build(ctxs ...context.Context) error {
}
if pkgconfig.ZeroLogErrTraces && !b.noLog && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) {
b.errorData.ShortLog(stackSkipLogger.Error())
b.errorData.ShortLog(pkgconfig.ZeroLogger.Error())
} else if pkgconfig.ZeroLogAllTraces && !b.noLog {
b.errorData.ShortLog(stackSkipLogger.Error())
b.errorData.ShortLog(pkgconfig.ZeroLogger.Error())
}
b.errorData.CallListener(MethodBuild)
@@ -459,9 +440,9 @@ func (b *Builder) Output(ctx context.Context, g *gin.Context) {
b.errorData.Output(g)
if (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) && (pkgconfig.ZeroLogErrGinOutput || pkgconfig.ZeroLogAllGinOutput) {
b.errorData.Log(stackSkipLogger.Error())
b.errorData.Log(pkgconfig.ZeroLogger.Error())
} else if (b.errorData.Severity == SevWarn) && (pkgconfig.ZeroLogAllGinOutput) {
b.errorData.Log(stackSkipLogger.Warn())
b.errorData.Log(pkgconfig.ZeroLogger.Warn())
}
b.errorData.CallListener(MethodOutput)
@@ -469,7 +450,7 @@ func (b *Builder) Output(ctx context.Context, g *gin.Context) {
// Print prints the error
// If the error is SevErr we also send it to the error-service
func (b *Builder) Print(ctxs ...context.Context) {
func (b *Builder) Print(ctxs ...context.Context) Proxy {
warnOnPkgConfigNotInitialized()
for _, dctx := range ctxs {
@@ -477,12 +458,18 @@ func (b *Builder) Print(ctxs ...context.Context) {
}
if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal {
b.errorData.Log(stackSkipLogger.Error())
b.errorData.Log(pkgconfig.ZeroLogger.Error())
} else if b.errorData.Severity == SevWarn {
b.errorData.ShortLog(stackSkipLogger.Warn())
b.errorData.ShortLog(pkgconfig.ZeroLogger.Warn())
} else if b.errorData.Severity == SevInfo {
b.errorData.ShortLog(pkgconfig.ZeroLogger.Info())
} else {
b.errorData.ShortLog(pkgconfig.ZeroLogger.Debug())
}
b.errorData.CallListener(MethodPrint)
return Proxy{v: *b.errorData} // we return Proxy<Exerr> here instead of Exerr to prevent warnings on ignored err-returns
}
func (b *Builder) Format(level LogPrintLevel) string {
@@ -493,12 +480,13 @@ func (b *Builder) Format(level LogPrintLevel) string {
// If the error is SevErr we also send it to the error-service
func (b *Builder) Fatal(ctxs ...context.Context) {
b.errorData.Severity = SevFatal
for _, dctx := range ctxs {
b.CtxData(MethodFatal, dctx)
}
b.errorData.Severity = SevFatal
b.errorData.Log(stackSkipLogger.WithLevel(zerolog.FatalLevel))
b.errorData.Log(pkgconfig.ZeroLogger.WithLevel(zerolog.FatalLevel))
b.errorData.CallListener(MethodFatal)
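A hedged sketch of the Extra(..) mechanism described in the comments above: extra-data ends up in the rendered/returned error, while Str/Int metadata stays internal. The surrounding function and error values are illustrative only.

package example

import (
	"errors"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

// loadUser is purely illustrative; dbErr stands in for a real driver error.
func loadUser(id string) (string, error) {
	dbErr := errors.New("no rows in result set")

	return "", exerr.
		Wrap(dbErr, "failed to load user").
		Str("query", "SELECT ..."). // metadata: internal debug info, not part of the API output
		Extra("userID", id).        // extra-data: included in the rendered / returned error
		Build()
}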


@@ -12,11 +12,78 @@ import (
var reflectTypeStr = reflect.TypeOf("")
func FromError(err error) *ExErr {
if err == nil {
// prevent NPE if we call FromError with err==nil
return &ExErr{
UniqueID: newID(),
Category: CatForeign,
Type: TypeInternal,
Severity: SevErr,
Timestamp: time.Time{},
StatusCode: nil,
Message: "",
WrappedErrType: "nil",
WrappedErr: err,
Caller: "",
OriginalError: nil,
Meta: make(MetaMap),
Extra: make(map[string]any),
}
}
//goland:noinspection GoTypeAssertionOnErrors
if verr, ok := err.(*ExErr); ok {
// A simple ExErr
return verr
}
//goland:noinspection GoTypeAssertionOnErrors
if verr, ok := err.(langext.PanicWrappedErr); ok {
return &ExErr{
UniqueID: newID(),
Category: CatForeign,
Type: TypePanic,
Severity: SevErr,
Timestamp: time.Time{},
StatusCode: nil,
Message: "A panic occurred",
WrappedErrType: fmt.Sprintf("%T", verr),
WrappedErr: err,
Caller: "",
OriginalError: nil,
Meta: MetaMap{
"panic_object": {DataType: MDTString, Value: fmt.Sprintf("%+v", verr.RecoveredObj())},
"panic_type": {DataType: MDTString, Value: fmt.Sprintf("%T", verr.RecoveredObj())},
"stack": {DataType: MDTString, Value: verr.Stack},
},
Extra: make(map[string]any),
}
}
//goland:noinspection GoTypeAssertionOnErrors
if verr, ok := err.(*langext.PanicWrappedErr); ok && verr != nil {
return &ExErr{
UniqueID: newID(),
Category: CatForeign,
Type: TypePanic,
Severity: SevErr,
Timestamp: time.Time{},
StatusCode: nil,
Message: "A panic occurred",
WrappedErrType: fmt.Sprintf("%T", verr),
WrappedErr: err,
Caller: "",
OriginalError: nil,
Meta: MetaMap{
"panic_object": {DataType: MDTString, Value: fmt.Sprintf("%+v", verr.RecoveredObj())},
"panic_type": {DataType: MDTString, Value: fmt.Sprintf("%T", verr.RecoveredObj())},
"stack": {DataType: MDTString, Value: verr.Stack},
},
Extra: make(map[string]any),
}
}
// A foreign error (eg a MongoDB exception)
return &ExErr{
UniqueID: newID(),
@@ -31,6 +98,7 @@ func FromError(err error) *ExErr {
Caller: "",
OriginalError: nil,
Meta: getForeignMeta(err),
Extra: make(map[string]any),
}
}
@@ -48,6 +116,7 @@ func newExErr(cat ErrorCategory, errtype ErrorType, msg string) *ExErr {
Caller: callername(2),
OriginalError: nil,
Meta: make(map[string]MetaValue),
Extra: make(map[string]any),
}
}
@@ -56,7 +125,7 @@ func wrapExErr(e *ExErr, msg string, cat ErrorCategory, stacktraceskip int) *ExE
UniqueID: newID(),
Category: cat,
Type: TypeWrap,
Severity: SevErr,
Severity: e.Severity,
Timestamp: time.Now(),
StatusCode: e.StatusCode,
Message: msg,
@@ -65,6 +134,7 @@ func wrapExErr(e *ExErr, msg string, cat ErrorCategory, stacktraceskip int) *ExE
Caller: callername(1 + stacktraceskip),
OriginalError: e,
Meta: make(map[string]MetaValue),
Extra: langext.CopyMap(langext.ForceMap(e.Extra)),
}
}
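FromError is nil-safe after this change and converts foreign errors (and wrapped panics) into an ExErr; a brief sketch with illustrative error values:

package example

import (
	"errors"
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

func demoFromError() {
	// a foreign error is converted into an ExErr with Category/Type/Meta filled in
	ee := exerr.FromError(errors.New("connection refused"))
	fmt.Println(ee.Category, ee.Type)

	// since this change, err == nil no longer panics but yields a placeholder ExErr
	fmt.Println(exerr.FromError(nil).WrappedErrType) // "nil"
}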


@@ -23,6 +23,7 @@ var (
TypeInternal = NewType("INTERNAL_ERROR", langext.Ptr(500))
TypePanic = NewType("PANIC", langext.Ptr(500))
TypeNotImplemented = NewType("NOT_IMPLEMENTED", langext.Ptr(500))
TypeAssert = NewType("ASSERT", langext.Ptr(500))
TypeMongoQuery = NewType("MONGO_QUERY", langext.Ptr(500))
TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500))
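The new TypeAssert error type is used like the other built-in types; a brief, illustrative sketch:

package example

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

// mustBeString is illustrative; TypeAssert maps to statuscode 500 like the other internal types above.
func mustBeString(v any) (string, error) {
	s, ok := v.(string)
	if !ok {
		return "", exerr.New(exerr.TypeAssert, "value is not a string").
			Str("actual_type", fmt.Sprintf("%T", v)).
			Build()
	}
	return s, nil
}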


@@ -4,8 +4,10 @@ import (
"context"
"fmt"
"github.com/gin-gonic/gin"
"github.com/rs/zerolog"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"net/http"
"os"
)
type ErrorPackageConfig struct {
@@ -21,6 +23,7 @@ type ErrorPackageConfig struct {
ZeroLogAllGinOutput bool // autom print zerolog logs on ginext.Error() / .Output(gin) (for all Severities)
ExtendGinMeta func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) // (Optionally) extend the final error meta values with additional data from the gin context (a few are automatically added, here more can be included)
ExtendContextMeta func(b *Builder, method Method, dctx context.Context) // (Optionally) extend the final error meta values with additional data from the context (a few are automatically added, here more can be included)
ZeroLogger zerolog.Logger // The logger used to print exerr log messages
}
type ErrorPackageConfigInit struct {
@@ -36,6 +39,7 @@ type ErrorPackageConfigInit struct {
ZeroLogAllGinOutput *bool
ExtendGinMeta func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request)
ExtendContextMeta func(b *Builder, method Method, dctx context.Context)
ZeroLogger *zerolog.Logger
}
var initialized = false
@@ -81,6 +85,13 @@ func Init(cfg ErrorPackageConfigInit) {
egcm = cfg.ExtendContextMeta
}
var logger zerolog.Logger
if cfg.ZeroLogger != nil {
logger = *cfg.ZeroLogger
} else {
logger = newDefaultLogger()
}
pkgconfig = ErrorPackageConfig{
ZeroLogErrTraces: langext.Coalesce(cfg.ZeroLogErrTraces, pkgconfig.ZeroLogErrTraces),
ZeroLogAllTraces: langext.Coalesce(cfg.ZeroLogAllTraces, pkgconfig.ZeroLogAllTraces),
@@ -94,11 +105,23 @@ func Init(cfg ErrorPackageConfigInit) {
ZeroLogErrGinOutput: langext.Coalesce(cfg.ZeroLogErrGinOutput, pkgconfig.ZeroLogErrGinOutput),
ExtendGinMeta: egm,
ExtendContextMeta: egcm,
ZeroLogger: logger,
}
initialized = true
}
func newDefaultLogger() zerolog.Logger {
cw := zerolog.ConsoleWriter{
Out: os.Stdout,
TimeFormat: "2006-01-02 15:04:05 Z07:00",
}
multi := zerolog.MultiLevelWriter(cw)
return zerolog.New(multi).With().Timestamp().CallerWithSkipFrameCount(4).Logger()
}
func Initialized() bool {
return initialized
}
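With the new ZeroLogger field, the logger used for exerr output can be supplied at Init time instead of the package-internal default; a hedged sketch (all other Init fields are omitted and fall back to their defaults):

package example

import (
	"os"

	"github.com/rs/zerolog"
	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

func initErrors() {
	logger := zerolog.New(os.Stderr).With().Timestamp().Logger()

	exerr.Init(exerr.ErrorPackageConfigInit{
		ZeroLogger: &logger, // if nil, newDefaultLogger() above is used
	})
}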


@@ -1,6 +1,7 @@
package exerr
import (
"fmt"
"github.com/rs/xid"
"github.com/rs/zerolog"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
@@ -26,7 +27,8 @@ type ExErr struct {
OriginalError *ExErr `json:"originalError"`
Meta MetaMap `json:"meta"`
Extra map[string]any `json:"extra"`
Meta MetaMap `json:"meta"`
}
func (ee *ExErr) Error() string {
@@ -36,6 +38,13 @@ func (ee *ExErr) Error() string {
// Unwrap must be implemented so that some error.XXX methods work
func (ee *ExErr) Unwrap() error {
if ee.OriginalError == nil {
if ee.WrappedErr != nil {
if werr, ok := ee.WrappedErr.(error); ok {
return werr
}
}
return nil // this is necessary - otherwise we return a wrapped nil and the `x == nil` comparison fails (= panic in errors.Is and other failures)
}
return ee.OriginalError
@@ -81,9 +90,29 @@ func (ee *ExErr) Log(evt *zerolog.Event) {
}
func (ee *ExErr) FormatLog(lvl LogPrintLevel) string {
// [LogPrintShort]
//
// - Only print message and type
// - Used e.g. for logging to the console when Build is called
// - also used in Print() if level == Warn/Info
//
// [LogPrintOverview]
//
// - print message, extra and errortrace
//
// [LogPrintFull]
//
// - print full error, with meta and extra, and trace, etc
// - Used in Output() and Print()
//
if lvl == LogPrintShort {
msg := ee.Message
if msg == "" {
msg = ee.RecursiveMessage()
}
if ee.OriginalError != nil && ee.OriginalError.Category == CatForeign {
msg = msg + " (" + strings.ReplaceAll(ee.OriginalError.Message, "\n", " ") + ")"
}
@@ -98,6 +127,10 @@ func (ee *ExErr) FormatLog(lvl LogPrintLevel) string {
str := "[" + ee.RecursiveType().Key + "] <" + ee.UniqueID + "> " + strings.ReplaceAll(ee.RecursiveMessage(), "\n", " ") + "\n"
for exk, exv := range ee.Extra {
str += fmt.Sprintf(" # [[[ %s ==> %v ]]]\n", exk, exv)
}
indent := ""
for curr := ee; curr != nil; curr = curr.OriginalError {
indent += " "
@@ -119,12 +152,16 @@ func (ee *ExErr) FormatLog(lvl LogPrintLevel) string {
str := "[" + ee.RecursiveType().Key + "] <" + ee.UniqueID + "> " + strings.ReplaceAll(ee.RecursiveMessage(), "\n", " ") + "\n"
for exk, exv := range ee.Extra {
str += fmt.Sprintf(" # [[[ %s ==> %v ]]]\n", exk, exv)
}
indent := ""
for curr := ee; curr != nil; curr = curr.OriginalError {
indent += " "
etype := ee.Type.Key
if ee.Type == TypeWrap {
etype := curr.Type.Key
if curr.Type == TypeWrap {
etype = "~"
}
@@ -168,7 +205,7 @@ func (ee *ExErr) ShortLog(evt *zerolog.Event) {
}
// RecursiveMessage returns the message to show
// = first error (top-down) that is not wrapping/foreign/empty
// = first error (top-down) that is not foreign/empty
// = lowest level error (that is not empty)
// = fallback to self.message
func (ee *ExErr) RecursiveMessage() string {
@@ -176,7 +213,7 @@ func (ee *ExErr) RecursiveMessage() string {
// ==== [1] ==== first error (top-down) that is not wrapping/foreign/empty
for curr := ee; curr != nil; curr = curr.OriginalError {
if curr.Message != "" && curr.Category != CatWrap && curr.Category != CatForeign {
if curr.Message != "" && curr.Category != CatForeign {
return curr.Message
}
}
@@ -325,6 +362,22 @@ func (ee *ExErr) GetMetaTime(key string) (time.Time, bool) {
return time.Time{}, false
}
func (ee *ExErr) GetExtra(key string) (any, bool) {
if v, ok := ee.Extra[key]; ok {
return v, true
}
return nil, false
}
func (ee *ExErr) UniqueIDs() []string {
ids := []string{ee.UniqueID}
for curr := ee; curr != nil; curr = curr.OriginalError {
ids = append(ids, curr.UniqueID)
}
return ids
}
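GetExtra and UniqueIDs (added above) allow post-hoc inspection of a built error; an illustrative sketch:

package example

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

func inspect(err error) {
	ee := exerr.FromError(err)

	// extra-data set via Builder.Extra(..) can be read back
	if v, ok := ee.GetExtra("userID"); ok {
		fmt.Println("extra userID:", v)
	}

	// every ExErr in the wrap-chain contributes its own unique id
	fmt.Println("ids:", ee.UniqueIDs())
}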
// contains tests whether the supplied error is contained in this error (anywhere in the chain)
func (ee *ExErr) contains(original *ExErr) (*ExErr, bool) {
if original == nil {


@@ -48,6 +48,12 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la
metaJson[metaKey] = metaVal.rawValueForJson()
}
ginJson["meta"] = metaJson
extraJson := langext.H{}
for extraKey, extraVal := range ee.Extra {
extraJson[extraKey] = extraVal
}
ginJson["extra"] = extraJson
}
if applyExtendListener {
@@ -90,6 +96,20 @@ func (ee *ExErr) ToAPIJson(applyExtendListener bool, includeWrappedErrors bool,
apiOutput["__data"] = ee.toJson(0, applyExtendListener, includeMetaFields)
}
for exkey, exval := range ee.Extra {
// ensure we do not override existing values
for {
if _, ok := apiOutput[exkey]; ok {
exkey = "_" + exkey
} else {
break
}
}
apiOutput[exkey] = exval
}
if applyExtendListener {
pkgconfig.ExtendGinOutput(ee, apiOutput)
}


@@ -86,3 +86,41 @@ func MessageMatch(e error, matcher func(string) bool) bool {
return false
}
// OriginalError returns the lowest level error, probably the original/external error that was originally wrapped
func OriginalError(e error) error {
if e == nil {
return nil
}
//goland:noinspection GoTypeAssertionOnErrors
bmerr, ok := e.(*ExErr)
if !ok {
return e
}
for bmerr.OriginalError != nil {
bmerr = bmerr.OriginalError
}
if bmerr.WrappedErr != nil {
if werr, ok := bmerr.WrappedErr.(error); ok {
return werr
}
}
return bmerr
}
func UniqueID(v error) *string {
if v == nil {
return nil
}
//goland:noinspection GoTypeAssertionOnErrors
if verr, ok := v.(*ExErr); ok {
return &verr.UniqueID
}
return nil
}
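OriginalError unwraps to the innermost error of the chain; a brief sketch (io.EOF is just a stand-in):

package example

import (
	"fmt"
	"io"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

func demoOriginalError() {
	err := exerr.Wrap(io.EOF, "read failed").Build()

	// the innermost wrapped (foreign) error is returned, here io.EOF
	fmt.Println(exerr.OriginalError(err) == io.EOF)

	// UniqueID returns nil for non-ExErr values, and the id for ExErr values
	fmt.Println(exerr.UniqueID(err) != nil)
}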

exerr/proxy.go (new file)

@@ -0,0 +1,13 @@
package exerr
type Proxy struct {
v ExErr
}
func (p *Proxy) UniqueID() string {
return p.v.UniqueID
}
func (p *Proxy) Get() ExErr {
return p.v
}
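The Proxy returned by Print() (see the Builder change above) lets callers read the generated unique id without triggering ignored-error-return warnings; an illustrative sketch:

package example

import "gogs.mikescher.com/BlackForestBytes/goext/exerr"

// logAndReference prints the error and hands the generated unique id back to the caller,
// e.g. to display it to an end user; the scenario is illustrative.
func logAndReference(err error) string {
	p := exerr.Wrap(err, "operation failed").Print() // Print now returns a Proxy instead of nothing
	return p.UniqueID()
}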


@@ -3,13 +3,17 @@ package ginext
import (
"github.com/gin-gonic/gin"
"net/http"
"strings"
)
func CorsMiddleware() gin.HandlerFunc {
func CorsMiddleware(allowheader []string, exposeheader []string) gin.HandlerFunc {
return func(c *gin.Context) {
c.Writer.Header().Set("Access-Control-Allow-Origin", "*")
c.Writer.Header().Set("Access-Control-Allow-Credentials", "true")
c.Writer.Header().Set("Access-Control-Allow-Headers", "Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization, accept, origin, Cache-Control, X-Requested-With")
c.Writer.Header().Set("Access-Control-Allow-Headers", strings.Join(allowheader, ", "))
if len(exposeheader) > 0 {
c.Writer.Header().Set("Access-Control-Expose-Headers", strings.Join(exposeheader, ", "))
}
c.Writer.Header().Set("Access-Control-Allow-Methods", "OPTIONS, GET, POST, PUT, PATCH, DELETE, COUNT")
if c.Request.Method == "OPTIONS" {


@@ -21,12 +21,16 @@ type GinWrapper struct {
opt Options
allowCors bool
corsAllowHeader []string
corsExposeHeader []string
ginDebug bool
bufferBody bool
requestTimeout time.Duration
listenerBeforeRequest []func(g *gin.Context)
listenerAfterRequest []func(g *gin.Context, resp HTTPResponse)
buildRequestBindError func(g *gin.Context, fieldtype string, err error) HTTPResponse
routeSpecs []ginRouteSpec
}
@@ -38,19 +42,36 @@ type ginRouteSpec struct {
}
type Options struct {
AllowCors *bool // Add cors handler to allow all CORS requests on the default http methods
GinDebug *bool // Set gin.debug to true (adds more logs)
SuppressGinLogs *bool // Suppress our custom gin logs (even if GinDebug == true)
BufferBody *bool // Buffers the input body stream, this way the ginext error handler can later include the whole request body
Timeout *time.Duration // The default handler timeout
ListenerBeforeRequest []func(g *gin.Context) // Register listener that are called before the handler method
ListenerAfterRequest []func(g *gin.Context, resp HTTPResponse) // Register listener that are called after the handler method
DebugTrimHandlerPrefixes []string // Trim these prefixes from the handler names in the debug print
DebugReplaceHandlerNames map[string]string // Replace handler names in debug output
AllowCors *bool // Add cors handler to allow all CORS requests on the default http methods
CorsAllowHeader *[]string // override the default values of Access-Control-Allow-Headers (AllowCors must be true)
CorsExposeHeader *[]string // return Access-Control-Expose-Headers (AllowCors must be true)
GinDebug *bool // Set gin.debug to true (adds more logs)
SuppressGinLogs *bool // Suppress our custom gin logs (even if GinDebug == true)
BufferBody *bool // Buffers the input body stream, this way the ginext error handler can later include the whole request body
Timeout *time.Duration // The default handler timeout
ListenerBeforeRequest []func(g *gin.Context) // Register listener that are called before the handler method
ListenerAfterRequest []func(g *gin.Context, resp HTTPResponse) // Register listener that are called after the handler method
DebugTrimHandlerPrefixes []string // Trim these prefixes from the handler names in the debug print
DebugReplaceHandlerNames map[string]string // Replace handler names in debug output
BuildRequestBindError func(g *gin.Context, fieldtype string, err error) HTTPResponse // Override function which generates the HTTPResponse errors that are returned by the PreContext.Start() methods
}
// NewEngine creates a new (wrapped) ginEngine
func NewEngine(opt Options) *GinWrapper {
ginDebug := langext.Coalesce(opt.GinDebug, true)
if ginDebug {
gin.SetMode(gin.DebugMode)
// do not debug-print routes
gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {}
} else {
gin.SetMode(gin.ReleaseMode)
// do not debug-print routes
gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {}
}
engine := gin.New()
wrapper := &GinWrapper{
@@ -58,36 +79,27 @@ func NewEngine(opt Options) *GinWrapper {
opt: opt,
suppressGinLogs: langext.Coalesce(opt.SuppressGinLogs, false),
allowCors: langext.Coalesce(opt.AllowCors, false),
ginDebug: langext.Coalesce(opt.GinDebug, true),
corsAllowHeader: langext.Coalesce(opt.CorsAllowHeader, []string{"Content-Type", "Content-Length", "Accept-Encoding", "X-CSRF-Token", "Authorization", "accept", "origin", "Cache-Control", "X-Requested-With"}),
corsExposeHeader: langext.Coalesce(opt.CorsExposeHeader, []string{}),
ginDebug: ginDebug,
bufferBody: langext.Coalesce(opt.BufferBody, false),
requestTimeout: langext.Coalesce(opt.Timeout, 24*time.Hour),
listenerBeforeRequest: opt.ListenerBeforeRequest,
listenerAfterRequest: opt.ListenerAfterRequest,
buildRequestBindError: langext.Conditional(opt.BuildRequestBindError == nil, defaultBuildRequestBindError, opt.BuildRequestBindError),
}
engine.RedirectFixedPath = false
engine.RedirectTrailingSlash = false
if wrapper.allowCors {
engine.Use(CorsMiddleware())
engine.Use(CorsMiddleware(wrapper.corsAllowHeader, wrapper.corsExposeHeader))
}
// do not debug-print routes
gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {}
if !wrapper.ginDebug {
gin.SetMode(gin.ReleaseMode)
if !wrapper.suppressGinLogs {
ginlogger := gin.Logger()
engine.Use(func(context *gin.Context) {
ginlogger(context)
})
}
} else {
gin.SetMode(gin.DebugMode)
if ginDebug && !wrapper.suppressGinLogs {
ginlogger := gin.Logger()
engine.Use(func(context *gin.Context) { ginlogger(context) })
}
return wrapper
}
@@ -216,3 +228,11 @@ func (w *GinWrapper) ServeHTTP(req *http.Request) *httptest.ResponseRecorder {
func (w *GinWrapper) ForwardRequest(writer http.ResponseWriter, req *http.Request) {
w.engine.ServeHTTP(writer, req)
}
func (w *GinWrapper) ListRoutes() []gin.RouteInfo {
return w.engine.Routes()
}
func defaultBuildRequestBindError(g *gin.Context, fieldtype string, err error) HTTPResponse {
return Error(err)
}
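A hedged sketch of wiring the new CORS and bind-error options when creating the engine (header values and the error text are illustrative; route registration is outside this diff):

package example

import (
	"net/http"

	"github.com/gin-gonic/gin"
	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func newAPI() *ginext.GinWrapper {
	return ginext.NewEngine(ginext.Options{
		AllowCors:        langext.Ptr(true),
		CorsAllowHeader:  &[]string{"Content-Type", "Authorization"},
		CorsExposeHeader: &[]string{"X-Request-Id"},
		BuildRequestBindError: func(g *gin.Context, fieldtype string, err error) ginext.HTTPResponse {
			// override the default, which simply returns ginext.Error(err)
			return ginext.Text(http.StatusBadRequest, "invalid request ("+fieldtype+")")
		},
	})
}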

ginext/jsonFilter.go (new file)

@@ -0,0 +1,9 @@
package ginext
import "github.com/gin-gonic/gin"
var jsonFilterKey = "goext.jsonfilter"
func SetJSONFilter(g *gin.Context, filter string) {
g.Set(jsonFilterKey, filter)
}
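SetJSONFilter stores the filter under the new jsonFilterKey so the JSON renderer (see responseJson.go below) picks it up; a minimal middleware sketch:

package example

import (
	"github.com/gin-gonic/gin"
	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

// jsonFilterMiddleware applies a JSON output filter to every response in this request.
func jsonFilterMiddleware(filter string) gin.HandlerFunc {
	return func(g *gin.Context) {
		ginext.SetJSONFilter(g, filter)
		g.Next()
	}
}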


@@ -15,16 +15,17 @@ import (
)
type PreContext struct {
ginCtx *gin.Context
wrapper *GinWrapper
uri any
query any
body any
rawbody *[]byte
form any
header any
timeout *time.Duration
persistantData *preContextData // must be a ptr, so that we can get the values back in out Wrap func
ginCtx *gin.Context
wrapper *GinWrapper
uri any
query any
body any
rawbody *[]byte
form any
header any
timeout *time.Duration
persistantData *preContextData // must be a ptr, so that we can get the values back in our Wrap func
ignoreWrongContentType bool
}
type preContextData struct {
@@ -71,6 +72,11 @@ func (pctx *PreContext) WithSession(sessionObj SessionObject) *PreContext {
return pctx
}
func (pctx *PreContext) IgnoreWrongContentType() *PreContext {
pctx.ignoreWrongContentType = true
return pctx
}
func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
if pctx.uri != nil {
if err := pctx.ginCtx.ShouldBindUri(pctx.uri); err != nil {
@@ -78,7 +84,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailURI).
Str("struct_type", fmt.Sprintf("%T", pctx.uri)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "URI", err))
}
}
@@ -88,24 +94,37 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailQuery).
Str("struct_type", fmt.Sprintf("%T", pctx.query)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "QUERY", err))
}
}
if pctx.body != nil {
if pctx.ginCtx.ContentType() == "application/json" {
if brc, ok := pctx.body.(dataext.BufferedReadCloser); ok {
// Ensures a fully reset (offset=0) buffer before parsing
err := brc.Reset()
if err != nil {
err = exerr.Wrap(err, "Failed to read (brc.reset) json-body").
WithType(exerr.TypeBindFailJSON).
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
Build()
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "JSON", err))
}
}
if err := pctx.ginCtx.ShouldBindJSON(pctx.body); err != nil {
err = exerr.Wrap(err, "Failed to read json-body").
WithType(exerr.TypeBindFailJSON).
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "JSON", err))
}
} else {
err := exerr.New(exerr.TypeBindFailJSON, "missing JSON body").
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
Build()
return nil, nil, langext.Ptr(Error(err))
if !pctx.ignoreWrongContentType {
err := exerr.New(exerr.TypeBindFailJSON, "missing JSON body").
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
Build()
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "JSON", err))
}
}
}
@@ -113,14 +132,14 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
if brc, ok := pctx.ginCtx.Request.Body.(dataext.BufferedReadCloser); ok {
v, err := brc.BufferedAll()
if err != nil {
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "BODY", err))
}
*pctx.rawbody = v
} else {
buf := &bytes.Buffer{}
_, err := io.Copy(buf, pctx.ginCtx.Request.Body)
if err != nil {
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "BODY", err))
}
*pctx.rawbody = buf.Bytes()
}
@@ -133,7 +152,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailFormData).
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "FORM", err))
}
} else if pctx.ginCtx.ContentType() == "application/x-www-form-urlencoded" {
if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil {
@@ -141,13 +160,15 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailFormData).
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "FORM", err))
}
} else {
err := exerr.New(exerr.TypeBindFailFormData, "missing form body").
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(Error(err))
if !pctx.ignoreWrongContentType {
err := exerr.New(exerr.TypeBindFailFormData, "missing form body").
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "FORM", err))
}
}
}
@@ -157,7 +178,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailHeader).
Str("struct_type", fmt.Sprintf("%T", pctx.query)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "HEADER", err))
}
}
@@ -169,7 +190,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
err := pctx.persistantData.sessionObj.Init(pctx.ginCtx, actx)
if err != nil {
actx.Cancel()
return nil, nil, langext.Ptr(Error(exerr.Wrap(err, "Failed to init session").Build()))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "INIT", err))
}
}
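IgnoreWrongContentType() relaxes the strict content-type check performed in Start(); a hedged sketch (how the PreContext is built and how its result is used by the route wrapper is outside this diff):

package example

import "gogs.mikescher.com/BlackForestBytes/goext/ginext"

// startLenient assumes the caller already has a fully configured *ginext.PreContext
// and only opts out of the strict content-type check before binding.
func startLenient(pctx *ginext.PreContext) (*ginext.AppContext, *ginext.HTTPResponse) {
	actx, _, httpErr := pctx.IgnoreWrongContentType().Start()
	if httpErr != nil {
		return nil, httpErr
	}
	// the caller is responsible for actx.Cancel() when the request is done
	return actx, nil
}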


@@ -1,13 +1,8 @@
package ginext
import (
"context"
"fmt"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"os"
)
type cookieval struct {
@@ -41,462 +36,10 @@ type InspectableHTTPResponse interface {
Headers() []string
}
type jsonHTTPResponse struct {
statusCode int
data any
headers []headerval
cookies []cookieval
}
type HTTPErrorResponse interface {
HTTPResponse
func (j jsonHTTPResponse) jsonRenderer(g *gin.Context) json.GoJsonRender {
var f *string
if jsonfilter := g.GetString("goext.jsonfilter"); jsonfilter != "" {
f = &jsonfilter
}
return json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true, Filter: f}
}
func (j jsonHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Render(j.statusCode, j.jsonRenderer(g))
}
func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j jsonHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j jsonHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j jsonHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j jsonHTTPResponse) BodyString(g *gin.Context) *string {
if str, err := j.jsonRenderer(g).RenderString(); err == nil {
return &str
} else {
return nil
}
}
func (j jsonHTTPResponse) ContentType() string {
return "application/json"
}
func (j jsonHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type emptyHTTPResponse struct {
statusCode int
headers []headerval
cookies []cookieval
}
func (j emptyHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Status(j.statusCode)
}
func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j emptyHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j emptyHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j emptyHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j emptyHTTPResponse) BodyString(*gin.Context) *string {
return nil
}
func (j emptyHTTPResponse) ContentType() string {
return ""
}
func (j emptyHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type textHTTPResponse struct {
statusCode int
data string
headers []headerval
cookies []cookieval
}
func (j textHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.String(j.statusCode, "%s", j.data)
}
func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j textHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j textHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j textHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j textHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(j.data)
}
func (j textHTTPResponse) ContentType() string {
return "text/plain"
}
func (j textHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type dataHTTPResponse struct {
statusCode int
data []byte
contentType string
headers []headerval
cookies []cookieval
}
func (j dataHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Data(j.statusCode, j.contentType, j.data)
}
func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j dataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j dataHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j dataHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j dataHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(string(j.data))
}
func (j dataHTTPResponse) ContentType() string {
return j.contentType
}
func (j dataHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type fileHTTPResponse struct {
mimetype string
filepath string
filename *string
headers []headerval
cookies []cookieval
}
func (j fileHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
if j.filename != nil {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.File(j.filepath)
}
func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j fileHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j fileHTTPResponse) IsSuccess() bool {
return true
}
func (j fileHTTPResponse) Statuscode() int {
return 200
}
func (j fileHTTPResponse) BodyString(*gin.Context) *string {
data, err := os.ReadFile(j.filepath)
if err != nil {
return nil
}
return langext.Ptr(string(data))
}
func (j fileHTTPResponse) ContentType() string {
return j.mimetype
}
func (j fileHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type downloadDataHTTPResponse struct {
statusCode int
mimetype string
data []byte
filename *string
headers []headerval
cookies []cookieval
}
func (j downloadDataHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
if j.filename != nil {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Data(j.statusCode, j.mimetype, j.data)
}
func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j downloadDataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j downloadDataHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j downloadDataHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j downloadDataHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(string(j.data))
}
func (j downloadDataHTTPResponse) ContentType() string {
return j.mimetype
}
func (j downloadDataHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type redirectHTTPResponse struct {
statusCode int
url string
headers []headerval
cookies []cookieval
}
func (j redirectHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Redirect(j.statusCode, j.url)
}
func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j redirectHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j redirectHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j redirectHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j redirectHTTPResponse) BodyString(*gin.Context) *string {
return nil
}
func (j redirectHTTPResponse) ContentType() string {
return ""
}
func (j redirectHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type jsonAPIErrResponse struct {
err *exerr.ExErr
headers []headerval
cookies []cookieval
}
func (j jsonAPIErrResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
exerr.Get(j.err).Output(context.Background(), g)
j.err.CallListener(exerr.MethodOutput)
}
func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j jsonAPIErrResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j jsonAPIErrResponse) IsSuccess() bool {
return false
}
func (j jsonAPIErrResponse) Statuscode() int {
return langext.Coalesce(j.err.RecursiveStatuscode(), 0)
}
func (j jsonAPIErrResponse) BodyString(*gin.Context) *string {
if str, err := j.err.ToDefaultAPIJson(); err == nil {
return &str
} else {
return nil
}
}
func (j jsonAPIErrResponse) ContentType() string {
return "application/json"
}
func (j jsonAPIErrResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func (j jsonAPIErrResponse) Unwrap() error {
return j.err
}
func Status(sc int) HTTPResponse {
return &emptyHTTPResponse{statusCode: sc}
}
func JSON(sc int, data any) HTTPResponse {
return &jsonHTTPResponse{statusCode: sc, data: data}
}
func Data(sc int, contentType string, data []byte) HTTPResponse {
return &dataHTTPResponse{statusCode: sc, contentType: contentType, data: data}
}
func Text(sc int, data string) HTTPResponse {
return &textHTTPResponse{statusCode: sc, data: data}
}
func File(mimetype string, filepath string) HTTPResponse {
return &fileHTTPResponse{mimetype: mimetype, filepath: filepath}
}
func Download(mimetype string, filepath string, filename string) HTTPResponse {
return &fileHTTPResponse{mimetype: mimetype, filepath: filepath, filename: &filename}
}
func DownloadData(status int, mimetype string, filename string, data []byte) HTTPResponse {
return &downloadDataHTTPResponse{statusCode: status, mimetype: mimetype, data: data, filename: &filename}
}
func Redirect(sc int, newURL string) HTTPResponse {
return &redirectHTTPResponse{statusCode: sc, url: newURL}
}
func Error(e error) HTTPResponse {
return &jsonAPIErrResponse{
err: exerr.FromError(e),
}
}
func ErrWrap(e error, errorType exerr.ErrorType, msg string) HTTPResponse {
return &jsonAPIErrResponse{
err: exerr.FromError(exerr.Wrap(e, msg).WithType(errorType).Build()),
}
Error() error
}
func NotImplemented() HTTPResponse {

ginext/responseData.go (new file)

@@ -0,0 +1,58 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type dataHTTPResponse struct {
statusCode int
data []byte
contentType string
headers []headerval
cookies []cookieval
}
func (j dataHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Data(j.statusCode, j.contentType, j.data)
}
func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j dataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j dataHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j dataHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j dataHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(string(j.data))
}
func (j dataHTTPResponse) ContentType() string {
return j.contentType
}
func (j dataHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Data(sc int, contentType string, data []byte) HTTPResponse {
return &dataHTTPResponse{statusCode: sc, contentType: contentType, data: data}
}


@@ -0,0 +1,64 @@
package ginext
import (
"fmt"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type downloadDataHTTPResponse struct {
statusCode int
mimetype string
data []byte
filename *string
headers []headerval
cookies []cookieval
}
func (j downloadDataHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
if j.filename != nil {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Data(j.statusCode, j.mimetype, j.data)
}
func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j downloadDataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j downloadDataHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j downloadDataHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j downloadDataHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(string(j.data))
}
func (j downloadDataHTTPResponse) ContentType() string {
return j.mimetype
}
func (j downloadDataHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func DownloadData(status int, mimetype string, filename string, data []byte) HTTPResponse {
return &downloadDataHTTPResponse{statusCode: status, mimetype: mimetype, data: data, filename: &filename}
}

ginext/responseEmpty.go (new file)

@@ -0,0 +1,56 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type emptyHTTPResponse struct {
statusCode int
headers []headerval
cookies []cookieval
}
func (j emptyHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Status(j.statusCode)
}
func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j emptyHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j emptyHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j emptyHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j emptyHTTPResponse) BodyString(*gin.Context) *string {
return nil
}
func (j emptyHTTPResponse) ContentType() string {
return ""
}
func (j emptyHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Status(sc int) HTTPResponse {
return &emptyHTTPResponse{statusCode: sc}
}

ginext/responseFile.go (new file)

@@ -0,0 +1,73 @@
package ginext
import (
"fmt"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"os"
)
type fileHTTPResponse struct {
mimetype string
filepath string
filename *string
headers []headerval
cookies []cookieval
}
func (j fileHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
if j.filename != nil {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.File(j.filepath)
}
func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j fileHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j fileHTTPResponse) IsSuccess() bool {
return true
}
func (j fileHTTPResponse) Statuscode() int {
return 200
}
func (j fileHTTPResponse) BodyString(*gin.Context) *string {
data, err := os.ReadFile(j.filepath)
if err != nil {
return nil
}
return langext.Ptr(string(data))
}
func (j fileHTTPResponse) ContentType() string {
return j.mimetype
}
func (j fileHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func File(mimetype string, filepath string) HTTPResponse {
return &fileHTTPResponse{mimetype: mimetype, filepath: filepath}
}
func Download(mimetype string, filepath string, filename string) HTTPResponse {
return &fileHTTPResponse{mimetype: mimetype, filepath: filepath, filename: &filename}
}

ginext/responseJson.go (new file)

@@ -0,0 +1,78 @@
package ginext
import (
"github.com/gin-gonic/gin"
json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type jsonHTTPResponse struct {
statusCode int
data any
headers []headerval
cookies []cookieval
filterOverride *string
}
func (j jsonHTTPResponse) jsonRenderer(g *gin.Context) json.GoJsonRender {
var f *string
if jsonfilter := g.GetString(jsonFilterKey); jsonfilter != "" {
f = &jsonfilter
}
if j.filterOverride != nil {
f = j.filterOverride
}
return json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true, Filter: f}
}
func (j jsonHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Render(j.statusCode, j.jsonRenderer(g))
}
func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j jsonHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j jsonHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j jsonHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j jsonHTTPResponse) BodyString(g *gin.Context) *string {
if str, err := j.jsonRenderer(g).RenderString(); err == nil {
return &str
} else {
return nil
}
}
func (j jsonHTTPResponse) ContentType() string {
return "application/json"
}
func (j jsonHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func JSON(sc int, data any) HTTPResponse {
return &jsonHTTPResponse{statusCode: sc, data: data}
}
func JSONWithFilter(sc int, data any, f string) HTTPResponse {
return &jsonHTTPResponse{statusCode: sc, data: data, filterOverride: &f}
}
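JSONWithFilter forces a specific output filter for a single response, taking precedence over any filter set earlier via SetJSONFilter; a sketch (the DTO and filter name are illustrative, and how gojson interprets the filter string is outside this diff):

package example

import (
	"net/http"

	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

type userDTO struct {
	ID    string `json:"id"`
	Email string `json:"email"`
}

// adminUserResponse renders with the "admin" filter regardless of what request middleware set.
func adminUserResponse(u userDTO) ginext.HTTPResponse {
	return ginext.JSONWithFilter(http.StatusOK, u, "admin")
}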

ginext/responseJsonAPI.go (new file)

@@ -0,0 +1,81 @@
package ginext
import (
"context"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type jsonAPIErrResponse struct {
err *exerr.ExErr
headers []headerval
cookies []cookieval
}
func (j jsonAPIErrResponse) Error() error {
return j.err
}
func (j jsonAPIErrResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
exerr.Get(j.err).Output(context.Background(), g)
j.err.CallListener(exerr.MethodOutput)
}
func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j jsonAPIErrResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j jsonAPIErrResponse) IsSuccess() bool {
return false
}
func (j jsonAPIErrResponse) Statuscode() int {
return langext.Coalesce(j.err.RecursiveStatuscode(), 0)
}
func (j jsonAPIErrResponse) BodyString(*gin.Context) *string {
if str, err := j.err.ToDefaultAPIJson(); err == nil {
return &str
} else {
return nil
}
}
func (j jsonAPIErrResponse) ContentType() string {
return "application/json"
}
func (j jsonAPIErrResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func (j jsonAPIErrResponse) Unwrap() error {
return j.err
}
func Error(e error) HTTPResponse {
return &jsonAPIErrResponse{
err: exerr.FromError(e),
}
}
func ErrWrap(e error, errorType exerr.ErrorType, msg string) HTTPResponse {
return &jsonAPIErrResponse{
err: exerr.FromError(exerr.Wrap(e, msg).WithType(errorType).Build()),
}
}
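Error and ErrWrap convert any error into the JSON API error response; a brief handler-style sketch (the load callback is a stand-in for any data-access function):

package example

import (
	"net/http"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

func fetchHandler(load func() (any, error)) ginext.HTTPResponse {
	data, err := load()
	if err != nil {
		// wraps err into an ExErr, tags it with a type, and renders it via Output(..) on write
		return ginext.ErrWrap(err, exerr.TypeInternal, "failed to load data")
	}
	return ginext.JSON(http.StatusOK, data)
}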


@@ -0,0 +1,57 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type redirectHTTPResponse struct {
statusCode int
url string
headers []headerval
cookies []cookieval
}
func (j redirectHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Redirect(j.statusCode, j.url)
}
func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j redirectHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j redirectHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j redirectHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j redirectHTTPResponse) BodyString(*gin.Context) *string {
return nil
}
func (j redirectHTTPResponse) ContentType() string {
return ""
}
func (j redirectHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Redirect(sc int, newURL string) HTTPResponse {
return &redirectHTTPResponse{statusCode: sc, url: newURL}
}


@@ -0,0 +1,72 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"io"
"net/http"
"time"
)
type seekableResponse struct {
data io.ReadSeeker
contentType string
filename string
headers []headerval
cookies []cookieval
}
func (j seekableResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.contentType) // if we don't set it here http.ServeContent does weird sniffing later...
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
http.ServeContent(g.Writer, g.Request, j.filename, time.Unix(0, 0), j.data)
if clsr, ok := j.data.(io.ReadSeekCloser); ok {
err := clsr.Close()
if err != nil {
exerr.Wrap(err, "failed to close io.ReadSeerkClose in ginext.Seekable").Str("filename", j.filename).Print()
}
}
}
func (j seekableResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j seekableResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j seekableResponse) IsSuccess() bool {
return true
}
func (j seekableResponse) Statuscode() int {
return 200
}
func (j seekableResponse) BodyString(*gin.Context) *string {
return langext.Ptr("(seekable)")
}
func (j seekableResponse) ContentType() string {
return j.contentType
}
func (j seekableResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Seekable(filename string, contentType string, data io.ReadSeeker) HTTPResponse {
return &seekableResponse{filename: filename, contentType: contentType, data: data}
}
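Seekable serves range-request-capable content via http.ServeContent and closes the reader afterwards if it is an io.ReadSeekCloser; a sketch with a local file (the path and content type are placeholders):

package example

import (
	"os"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

func serveVideo() ginext.HTTPResponse {
	f, err := os.Open("/data/media/clip.mp4") // *os.File is an io.ReadSeekCloser, so it is closed after serving
	if err != nil {
		return ginext.Error(exerr.Wrap(err, "failed to open media file").Build())
	}
	return ginext.Seekable("clip.mp4", "video/mp4", f)
}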

ginext/responseText.go (new file)

@@ -0,0 +1,57 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type textHTTPResponse struct {
statusCode int
data string
headers []headerval
cookies []cookieval
}
func (j textHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.String(j.statusCode, "%s", j.data)
}
func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j textHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j textHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j textHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j textHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(j.data)
}
func (j textHTTPResponse) ContentType() string {
return "text/plain"
}
func (j textHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Text(sc int, data string) HTTPResponse {
return &textHTTPResponse{statusCode: sc, data: data}
}


@@ -57,7 +57,7 @@ func (w *GinRoutesWrapper) Use(middleware ...gin.HandlerFunc) *GinRoutesWrapper
}
func (w *GinRoutesWrapper) WithJSONFilter(filter string) *GinRoutesWrapper {
return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) })
return w.Use(func(g *gin.Context) { g.Set(jsonFilterKey, filter) })
}
func (w *GinRoutesWrapper) GET(relativePath string) *GinRouteBuilder {
@@ -112,7 +112,7 @@ func (w *GinRouteBuilder) Use(middleware ...gin.HandlerFunc) *GinRouteBuilder {
}
func (w *GinRouteBuilder) WithJSONFilter(filter string) *GinRouteBuilder {
return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) })
return w.Use(func(g *gin.Context) { g.Set(jsonFilterKey, filter) })
}
func (w *GinRouteBuilder) Handle(handler WHandlerFunc) {

go.mod

@@ -1,63 +1,61 @@
module gogs.mikescher.com/BlackForestBytes/goext
go 1.22
go 1.23
require (
github.com/gin-gonic/gin v1.10.0
github.com/glebarez/go-sqlite v1.22.0 // only needed for tests -.-
github.com/jmoiron/sqlx v1.4.0
github.com/rs/xid v1.5.0
github.com/rs/zerolog v1.32.0
go.mongodb.org/mongo-driver v1.15.0
golang.org/x/crypto v0.23.0
golang.org/x/sys v0.20.0
golang.org/x/term v0.20.0
github.com/rs/xid v1.6.0
github.com/rs/zerolog v1.33.0
go.mongodb.org/mongo-driver v1.17.2
golang.org/x/crypto v0.32.0
golang.org/x/sys v0.29.0
golang.org/x/term v0.28.0
)
require (
github.com/disintegration/imaging v1.6.2
github.com/jung-kurt/gofpdf v1.16.2
golang.org/x/sync v0.7.0
golang.org/x/sync v0.10.0
)
require (
github.com/bytedance/sonic v1.11.6 // indirect
github.com/bytedance/sonic/loader v0.1.1 // indirect
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
github.com/chenzhuoyu/iasm v0.9.1 // indirect
github.com/bytedance/sonic v1.12.7 // indirect
github.com/bytedance/sonic/loader v0.2.2 // indirect
github.com/cloudwego/base64x v0.1.4 // indirect
github.com/cloudwego/iasm v0.2.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
github.com/gin-contrib/sse v1.0.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.20.0 // indirect
github.com/goccy/go-json v0.10.2 // indirect
github.com/go-playground/validator/v10 v10.23.0 // indirect
github.com/goccy/go-json v0.10.4 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/uuid v1.5.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.17.8 // indirect
github.com/klauspost/cpuid/v2 v2.2.7 // indirect
github.com/klauspost/compress v1.17.11 // indirect
github.com/klauspost/cpuid/v2 v2.2.9 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/montanaflynn/stats v0.7.1 // indirect
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.12 // indirect
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76 // indirect
golang.org/x/arch v0.8.0 // indirect
golang.org/x/image v0.16.0 // indirect
golang.org/x/net v0.25.0 // indirect
golang.org/x/text v0.15.0 // indirect
google.golang.org/protobuf v1.34.1 // indirect
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
golang.org/x/arch v0.13.0 // indirect
golang.org/x/image v0.23.0 // indirect
golang.org/x/net v0.34.0 // indirect
golang.org/x/text v0.21.0 // indirect
google.golang.org/protobuf v1.36.2 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
modernc.org/libc v1.37.6 // indirect
modernc.org/mathutil v1.6.0 // indirect

go.sum (320 changed lines)
View File

@@ -1,42 +1,25 @@
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE=
github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.11.0 h1:FwNNv6Vu4z2Onf1++LNzxB/QhitD8wuTdpZzMTGITWo=
github.com/bytedance/sonic v1.11.0/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.11.1 h1:JC0+6c9FoWYYxakaoa+c5QTtJeiSZNeByOBhXtAFSn4=
github.com/bytedance/sonic v1.11.1/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.11.2 h1:ywfwo0a/3j9HR8wsYGWsIWl2mvRsI950HyoxiBERw5A=
github.com/bytedance/sonic v1.11.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.11.3 h1:jRN+yEjakWh8aK5FzrciUHG8OFXK+4/KrAX/ysEtHAA=
github.com/bytedance/sonic v1.11.3/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.11.4 h1:8+OMLSSDDm2/qJc6ld5K5Sm62NK9VHcUKk0NzBoMAM4=
github.com/bytedance/sonic v1.11.4/go.mod h1:YrWEqYtlBPS6LUA0vpuG79a1trsh4Ae41uWUWUreHhE=
github.com/bytedance/sonic v1.11.5 h1:G00FYjjqll5iQ1PYXynbg/hyzqBqavH8Mo9/oTopd9k=
github.com/bytedance/sonic v1.11.5/go.mod h1:X2PC2giUdj/Cv2lliWFLk6c/DUQok5rViJSemeB0wDw=
github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0=
github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4=
github.com/bytedance/sonic/loader v0.1.0/go.mod h1:UmRT+IRTGKz/DAkzcEGzyVqQFJ7H9BqwBO3pm9H/+HY=
github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM=
github.com/bytedance/sonic v1.12.3 h1:W2MGa7RCU1QTeYRTPE3+88mVC0yXmsRQRChiyVocVjU=
github.com/bytedance/sonic v1.12.3/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic v1.12.4 h1:9Csb3c9ZJhfUWeMtpCDCq6BUoH5ogfDFLUgQ/jG+R0k=
github.com/bytedance/sonic v1.12.4/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic v1.12.5 h1:hoZxY8uW+mT+OpkcUWw4k0fDINtOcVavEsGfzwzFU/w=
github.com/bytedance/sonic v1.12.5/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic v1.12.6 h1:/isNmCUF2x3Sh8RAp/4mh4ZGkcFAX/hLrzrK3AvpRzk=
github.com/bytedance/sonic v1.12.6/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic v1.12.7 h1:CQU8pxOy9HToxhndH0Kx/S1qU/CuS9GnKYrGioDcU1Q=
github.com/bytedance/sonic v1.12.7/go.mod h1:tnbal4mxOMju17EGfknm2XyYcpyCnIROYOEYuemj13I=
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0=
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA=
github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0=
github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
github.com/cloudwego/base64x v0.1.0 h1:Tg5q9tq1khq9Y9UwfoC6zkHK0FypN2GLDvhqFceOL8U=
github.com/cloudwego/base64x v0.1.0/go.mod h1:lM8nFiNbg74QgesNo6EAtv8N9tlRjBWExmHoNDa3PkU=
github.com/cloudwego/base64x v0.1.3 h1:b5J/l8xolB7dyDTTmhJP2oTs5LdrjyrUFuNxdfq5hAg=
github.com/cloudwego/base64x v0.1.3/go.mod h1:1+1K5BUHIQzyapgpF7LwvOGAEDicKtt1umPV+aN8pi8=
github.com/bytedance/sonic/loader v0.2.0 h1:zNprn+lsIP06C/IqCHs3gPQIvnvpKbbxyXQP1iU4kWM=
github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.2.1 h1:1GgorWTqf12TA8mma4DDSbaQigE2wOgQo7iCjjJv3+E=
github.com/bytedance/sonic/loader v0.2.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.2.2 h1:jxAJuN9fOot/cyz5Q6dUuMJF5OqQ6+5GfA8FjjQ0R4o=
github.com/bytedance/sonic/loader v0.2.2/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
github.com/cloudwego/iasm v0.0.9/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
github.com/cloudwego/iasm v0.1.0 h1:q0OuhwWDMyi3nlrQ6kIr0Yx0c3FI6cq/OZWKodIDdz8=
github.com/cloudwego/iasm v0.1.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
@@ -47,12 +30,18 @@ github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
github.com/gabriel-vasile/mimetype v1.4.5 h1:J7wGKdGu33ocBOhGy0z653k/lFKLFDPJMG8Gql0kxn4=
github.com/gabriel-vasile/mimetype v1.4.5/go.mod h1:ibHel+/kbxn9x2407k1izTA1S81ku1z/DlgOW2QE0M4=
github.com/gabriel-vasile/mimetype v1.4.6 h1:3+PzJTKLkvgjeTbts6msPJt4DixhT4YtFNf1gtGe3zc=
github.com/gabriel-vasile/mimetype v1.4.6/go.mod h1:JX1qVKqZd40hUPpAfiNTe0Sne7hdfKSbOqqmkq8GCXc=
github.com/gabriel-vasile/mimetype v1.4.7 h1:SKFKl7kD0RiPdbht0s7hFtjl489WcQ1VyPW8ZzUMYCA=
github.com/gabriel-vasile/mimetype v1.4.7/go.mod h1:GDlAgAyIRT27BhFl53XNAFtfjzOkLaF35JdEG0P7LtU=
github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
github.com/gin-contrib/sse v1.0.0 h1:y3bT1mUWUxDpW4JLQg/HnTqV4rozuW4tC9eFKTxYI9E=
github.com/gin-contrib/sse v1.0.0/go.mod h1:zNuFdwarAygJBht0NTKiSi3jRf6RbqeILZ9Sp6Slhe0=
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ=
@@ -63,37 +52,26 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE=
github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74=
github.com/go-playground/validator/v10 v10.17.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-playground/validator/v10 v10.18.0 h1:BvolUXjp4zuvkZ5YN5t7ebzbhlUtPsPm2S9NAZ5nl9U=
github.com/go-playground/validator/v10 v10.18.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4=
github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8=
github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA=
github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-playground/validator/v10 v10.23.0 h1:/PwmTwZhS0dPkav3cdK9kV1FsAmrL8sThn8IHr/sO+o=
github.com/go-playground/validator/v10 v10.23.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-json v0.10.4 h1:JSwxQzIqKfmFX1swYPpUThQZp/Ka4wzJdK0LWVytLPM=
github.com/goccy/go-json v0.10.4/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ=
github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo=
github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU=
github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
@@ -101,54 +79,39 @@ github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHm
github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes=
github.com/jung-kurt/gofpdf v1.16.2 h1:jgbatWHfRlPYiK85qgevsZTHviWXKwB1TTiKdz5PtRc=
github.com/jung-kurt/gofpdf v1.16.2/go.mod h1:1hl7y57EsiPAkLbOwzpzqgx1A30nQCk/YmFV8S2vmK0=
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4=
github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI=
github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg=
github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU=
github.com/klauspost/compress v1.17.8/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/klauspost/compress v1.17.10 h1:oXAz+Vh0PMUvJczoi+flxpnBEPxoER1IaAnU/NMPtT0=
github.com/klauspost/compress v1.17.10/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc=
github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM=
github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/klauspost/cpuid/v2 v2.2.9 h1:66ze0taIn2H33fBvCkXuv9BmCwDfafmiIVpKV9kKGuY=
github.com/klauspost/cpuid/v2 v2.2.9/go.mod h1:rqkxqrZ1EhYM9G+hXH7YdowN5R5RGN6NK4QwQ3WMXF8=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI=
github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI=
github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/pelletier/go-toml/v2 v2.2.0 h1:QLgLl2yMN7N+ruc31VynXs1vhMZa7CeHHejIeBAsoHo=
github.com/pelletier/go-toml/v2 v2.2.0/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/pelletier/go-toml/v2 v2.2.1 h1:9TA9+T8+8CUCO2+WYnDLCgrYi9+omqKXyjDtosvtEhg=
github.com/pelletier/go-toml/v2 v2.2.1/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
github.com/phpdave11/gofpdi v1.0.7/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
@@ -156,12 +119,11 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc=
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A=
github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0=
github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU=
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8=
github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
@@ -173,11 +135,10 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
@@ -188,74 +149,63 @@ github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY=
github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4=
github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk=
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4=
github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76 h1:tBiBTKHnIjovYoLX/TPkcf+OjqqKGQrPtGT3Foz+Pgo=
github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76/go.mod h1:SQliXeA7Dhkt//vS29v3zpbEwoa+zb2Cn5xj5uO4K5U=
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk=
go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo=
go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80=
go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c=
go.mongodb.org/mongo-driver v1.15.0 h1:rJCKC8eEliewXjZGf0ddURtl7tTVy1TK3bfl0gkUSLc=
go.mongodb.org/mongo-driver v1.15.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc=
golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
go.mongodb.org/mongo-driver v1.17.1 h1:Wic5cJIwJgSpBhe3lx3+/RybR5PiYRMpVFgO7cOHyIM=
go.mongodb.org/mongo-driver v1.17.1/go.mod h1:wwWm/+BuOddhcq3n68LKRmgk2wXzmF6s0SFOa0GINL4=
go.mongodb.org/mongo-driver v1.17.2 h1:gvZyk8352qSfzyZ2UMWcpDpMSGEr1eqE4T793SqyhzM=
go.mongodb.org/mongo-driver v1.17.2/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
golang.org/x/arch v0.11.0 h1:KXV8WWKCXm6tRpLirl2szsO5j/oOODwZf4hATmGVNs4=
golang.org/x/arch v0.11.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/arch v0.12.0 h1:UsYJhbzPYGsT0HbEdmYcqtCv8UNGvnaL561NnIUvaKg=
golang.org/x/arch v0.12.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/arch v0.13.0 h1:KCkqVVV1kGg0X87TFysjCJ8MxtZEIU4Ja/yXGeoECdA=
golang.org/x/arch v0.13.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k=
golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.20.0 h1:jmAMJJZXr5KiCw05dfYK9QnqaqKLYXijU23lsEdcQqg=
golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ=
golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30=
golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw=
golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U=
golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ=
golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg=
golang.org/x/crypto v0.30.0 h1:RwoQn3GkWiMkzlX562cLB7OxWvjH1L8xutO2WoJcRoY=
golang.org/x/crypto v0.30.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3NCfkPxbDKRdnNE1Rpg0U=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.16.0 h1:9kloLAKhUufZhA12l5fwnx2NZW39/we1UhBesW433jw=
golang.org/x/image v0.16.0/go.mod h1:ugSZItdV4nOxyqp56HmXwH0Ry0nBCpjnZdpDaIHdoPs=
golang.org/x/image v0.21.0 h1:c5qV36ajHpdj4Qi0GnE0jUc/yuo33OLFaa0d+crTD5s=
golang.org/x/image v0.21.0/go.mod h1:vUbsLavqK/W303ZroQQVKQ+Af3Yl6Uz1Ppu5J/cLz78=
golang.org/x/image v0.22.0 h1:UtK5yLUzilVrkjMAZAZ34DXGpASN8i8pj8g+O+yd10g=
golang.org/x/image v0.22.0/go.mod h1:9hPFhljd4zZ1GNSIZJ49sqbp45GKK9t6w+iXvGqZUz4=
golang.org/x/image v0.23.0 h1:HseQ7c2OpPKTPVzNjG5fwJsOTCiiwS4QdsYi5XU6H68=
golang.org/x/image v0.23.0/go.mod h1:wJJBTdLfCCf3tiHa1fNxpZmUI4mmoZvwMCPP0ddoNKY=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c=
golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo=
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc=
golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w=
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4=
golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU=
golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo=
golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM=
golang.org/x/net v0.32.0 h1:ZqPmj8Kzc+Y6e0+skZsuACbx+wzMgo5MQsJh9Qd6aYI=
golang.org/x/net v0.32.0/go.mod h1:CwU0IoeOlnQQWJ6ioyFrfRuomB8GKF6KbYXZVyeXNfs=
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ=
golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -263,53 +213,48 @@ golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o=
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo=
golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s=
golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE=
golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY=
golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8=
golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q=
golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk=
golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24=
golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M=
golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU=
golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E=
golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM=
golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug=
golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU=
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.34.0 h1:Qo/qEd2RZPCf2nKuorzksSknv0d3ERwp1vFG38gSmH4=
google.golang.org/protobuf v1.34.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA=
google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io=
google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
google.golang.org/protobuf v1.36.1 h1:yBPeRvTftaleIgM3PZ/WBIZ7XM/eEYAaEyCwvyjq/gk=
google.golang.org/protobuf v1.36.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
google.golang.org/protobuf v1.36.2 h1:R8FeyR1/eLmkutZOM5CWghmo5itiG9z0ktFlTVLuTmU=
google.golang.org/protobuf v1.36.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
@@ -324,4 +269,3 @@ modernc.org/memory v1.7.2/go.mod h1:NO4NVCQy0N7ln+T9ngWqOQfi7ley4vpwvARR+Hjw95E=
modernc.org/sqlite v1.28.0 h1:Zx+LyDDmXczNnEQdvPuEfcFVA2ZPyaD7UCZDjef3BHQ=
modernc.org/sqlite v1.28.0/go.mod h1:Qxpazz0zH8Z1xCFyi5GSL3FzbtZ3fvbjmywNogldEW0=
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=

View File

@@ -1,5 +1,5 @@
package goext
const GoextVersion = "0.0.456"
const GoextVersion = "0.0.558"
const GoextVersionTimestamp = "2024-05-18T23:38:47+0200"
const GoextVersionTimestamp = "2025-01-10T15:36:23+0100"

View File

@@ -1,27 +0,0 @@
Copyright (c) 2009 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -4,9 +4,12 @@ JSON serializer which serializes nil-Arrays as `[]` and nil-maps as `{}`.
Idea from: https://github.com/homelight/json
Forked from https://github.com/golang/go/tree/547e8e22fe565d65d1fd4d6e71436a5a855447b0/src/encoding/json ( tag go1.20.2 )
Forked from https://github.com/golang/go/tree/194de8fbfaf4c3ed54e1a3c1b14fc67a830b8d95/src/encoding/json ( tag go1.23.4 )
-> https://github.com/golang/go/tree/go1.23.4/src/encoding/json
Added:
- `MarshalSafeCollections()` method
- `Encoder.nilSafeSlices` and `Encoder.nilSafeMaps` fields
- `Encoder.nilSafeSlices` and `Encoder.nilSafeMaps` fields
- `Add 'tagkey' to use different key than json (set on Decoder struct)`
- `Add 'jsonfilter' to filter printed fields (set via MarshalSafeCollections)`
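For context, plain encoding/json emits null for nil slices and maps, which is the behaviour the fork's MarshalSafeCollections exists to change; its exact signature is not shown in this diff, so only the stdlib side is sketched here:

package main

import (
	"encoding/json"
	"fmt"
)

type payload struct {
	Items []string          `json:"items"`
	Meta  map[string]string `json:"meta"`
}

func main() {
	out, _ := json.Marshal(payload{}) // stdlib behaviour
	fmt.Println(string(out))          // {"items":null,"meta":null}
	// gojson's MarshalSafeCollections is meant to produce {"items":[],"meta":{}} instead
}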

View File

@@ -17,14 +17,15 @@ import (
"unicode"
"unicode/utf16"
"unicode/utf8"
_ "unsafe" // for linkname
)
// Unmarshal parses the JSON-encoded data and stores the result
// in the value pointed to by v. If v is nil or not a pointer,
// Unmarshal returns an InvalidUnmarshalError.
// Unmarshal returns an [InvalidUnmarshalError].
//
// Unmarshal uses the inverse of the encodings that
// Marshal uses, allocating maps, slices, and pointers as necessary,
// [Marshal] uses, allocating maps, slices, and pointers as necessary,
// with the following additional rules:
//
// To unmarshal JSON into a pointer, Unmarshal first handles the case of
@@ -33,28 +34,28 @@ import (
// the value pointed at by the pointer. If the pointer is nil, Unmarshal
// allocates a new value for it to point to.
//
// To unmarshal JSON into a value implementing the Unmarshaler interface,
// Unmarshal calls that value's UnmarshalJSON method, including
// To unmarshal JSON into a value implementing [Unmarshaler],
// Unmarshal calls that value's [Unmarshaler.UnmarshalJSON] method, including
// when the input is a JSON null.
// Otherwise, if the value implements encoding.TextUnmarshaler
// and the input is a JSON quoted string, Unmarshal calls that value's
// UnmarshalText method with the unquoted form of the string.
// Otherwise, if the value implements [encoding.TextUnmarshaler]
// and the input is a JSON quoted string, Unmarshal calls
// [encoding.TextUnmarshaler.UnmarshalText] with the unquoted form of the string.
//
// To unmarshal JSON into a struct, Unmarshal matches incoming object
// keys to the keys used by Marshal (either the struct field name or its tag),
// keys to the keys used by [Marshal] (either the struct field name or its tag),
// preferring an exact match but also accepting a case-insensitive match. By
// default, object keys which don't have a corresponding struct field are
// ignored (see Decoder.DisallowUnknownFields for an alternative).
// ignored (see [Decoder.DisallowUnknownFields] for an alternative).
//
// To unmarshal JSON into an interface value,
// Unmarshal stores one of these in the interface value:
//
// bool, for JSON booleans
// float64, for JSON numbers
// string, for JSON strings
// []interface{}, for JSON arrays
// map[string]interface{}, for JSON objects
// nil for JSON null
// - bool, for JSON booleans
// - float64, for JSON numbers
// - string, for JSON strings
// - []interface{}, for JSON arrays
// - map[string]interface{}, for JSON objects
// - nil for JSON null
//
// To unmarshal a JSON array into a slice, Unmarshal resets the slice length
// to zero and then appends each element to the slice.
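A quick stdlib illustration of the type mapping listed above (the behaviour is unchanged by the rebase; only the doc comment was reformatted into a bullet list):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	var v any
	_ = json.Unmarshal([]byte(`{"n": 1, "ok": true, "tags": ["a"]}`), &v)
	m := v.(map[string]interface{})
	fmt.Printf("%T %T %T\n", m["n"], m["ok"], m["tags"]) // float64 bool []interface {}
}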
@@ -72,16 +73,15 @@ import (
// use. If the map is nil, Unmarshal allocates a new map. Otherwise Unmarshal
// reuses the existing map, keeping existing entries. Unmarshal then stores
// key-value pairs from the JSON object into the map. The map's key type must
// either be any string type, an integer, implement json.Unmarshaler, or
// implement encoding.TextUnmarshaler.
// either be any string type, an integer, or implement [encoding.TextUnmarshaler].
//
// If the JSON-encoded data contain a syntax error, Unmarshal returns a SyntaxError.
// If the JSON-encoded data contain a syntax error, Unmarshal returns a [SyntaxError].
//
// If a JSON value is not appropriate for a given target type,
// or if a JSON number overflows the target type, Unmarshal
// skips that field and completes the unmarshaling as best it can.
// If no more serious errors are encountered, Unmarshal returns
// an UnmarshalTypeError describing the earliest such error. In any
// an [UnmarshalTypeError] describing the earliest such error. In any
// case, it's not guaranteed that all the remaining fields following
// the problematic one will be unmarshaled into the target object.
//
@@ -114,7 +114,7 @@ func Unmarshal(data []byte, v any) error {
// a JSON value. UnmarshalJSON must copy the JSON data
// if it wishes to retain the data after returning.
//
// By convention, to approximate the behavior of Unmarshal itself,
// By convention, to approximate the behavior of [Unmarshal] itself,
// Unmarshalers implement UnmarshalJSON([]byte("null")) as a no-op.
type Unmarshaler interface {
UnmarshalJSON([]byte) error
@@ -151,8 +151,8 @@ func (e *UnmarshalFieldError) Error() string {
return "json: cannot unmarshal object key " + strconv.Quote(e.Key) + " into unexported field " + e.Field.Name + " of type " + e.Type.String()
}
// An InvalidUnmarshalError describes an invalid argument passed to Unmarshal.
// (The argument to Unmarshal must be a non-nil pointer.)
// An InvalidUnmarshalError describes an invalid argument passed to [Unmarshal].
// (The argument to [Unmarshal] must be a non-nil pointer.)
type InvalidUnmarshalError struct {
Type reflect.Type
}
@@ -541,17 +541,10 @@ func (d *decodeState) array(v reflect.Value) error {
break
}
// Get element of array, growing if necessary.
// Expand slice length, growing the slice if necessary.
if v.Kind() == reflect.Slice {
// Grow slice if necessary
if i >= v.Cap() {
newcap := v.Cap() + v.Cap()/2
if newcap < 4 {
newcap = 4
}
newv := reflect.MakeSlice(v.Type(), v.Len(), newcap)
reflect.Copy(newv, v)
v.Set(newv)
v.Grow(1)
}
if i >= v.Len() {
v.SetLen(i + 1)
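The hunk above swaps the hand-rolled capacity doubling for reflect.Value.Grow (available since Go 1.20). A standalone sketch of that API, not the decoder's own code:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	s := []int{1, 2, 3}
	v := reflect.ValueOf(&s).Elem() // addressable, so Grow/SetLen are allowed
	v.Grow(1)                       // guarantee capacity for at least one more element
	v.SetLen(v.Len() + 1)
	v.Index(3).SetInt(4)
	fmt.Println(s) // [1 2 3 4]
}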
@@ -585,13 +578,11 @@ func (d *decodeState) array(v reflect.Value) error {
if i < v.Len() {
if v.Kind() == reflect.Array {
// Array. Zero the rest.
z := reflect.Zero(v.Type().Elem())
for ; i < v.Len(); i++ {
v.Index(i).Set(z)
v.Index(i).SetZero() // zero remainder of array
}
} else {
v.SetLen(i)
v.SetLen(i) // truncate the slice
}
}
if i == 0 && v.Kind() == reflect.Slice {
@@ -601,7 +592,7 @@ func (d *decodeState) array(v reflect.Value) error {
}
var nullLiteral = []byte("null")
var textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
var textUnmarshalerType = reflect.TypeFor[encoding.TextUnmarshaler]()
// object consumes an object from d.data[d.off-1:], decoding into v.
// The first byte ('{') of the object has been read already.
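reflect.TypeFor[T]() (Go 1.22+) is used throughout the rebased file in place of the older reflect.TypeOf((*T)(nil)).Elem() pattern; both yield the same reflect.Type, as this small sketch shows:

package main

import (
	"encoding"
	"fmt"
	"reflect"
)

func main() {
	before := reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
	after := reflect.TypeFor[encoding.TextUnmarshaler]()
	fmt.Println(before == after) // true
}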
@@ -700,24 +691,13 @@ func (d *decodeState) object(v reflect.Value) error {
if !mapElem.IsValid() {
mapElem = reflect.New(elemType).Elem()
} else {
mapElem.Set(reflect.Zero(elemType))
mapElem.SetZero()
}
subv = mapElem
} else {
var f *field
if i, ok := fields.nameIndex[string(key)]; ok {
// Found an exact name match.
f = &fields.list[i]
} else {
// Fall back to the expensive case-insensitive
// linear search.
for i := range fields.list {
ff := &fields.list[i]
if ff.equalFold(ff.nameBytes, key) {
f = ff
break
}
}
f := fields.byExactName[string(key)]
if f == nil {
f = fields.byFoldedName[string(foldName(key))]
}
if f != nil {
subv = v
@@ -787,33 +767,35 @@ func (d *decodeState) object(v reflect.Value) error {
if v.Kind() == reflect.Map {
kt := t.Key()
var kv reflect.Value
switch {
case reflect.PointerTo(kt).Implements(textUnmarshalerType):
if reflect.PointerTo(kt).Implements(textUnmarshalerType) {
kv = reflect.New(kt)
if err := d.literalStore(item, kv, true); err != nil {
return err
}
kv = kv.Elem()
case kt.Kind() == reflect.String:
kv = reflect.ValueOf(key).Convert(kt)
default:
} else {
switch kt.Kind() {
case reflect.String:
kv = reflect.New(kt).Elem()
kv.SetString(string(key))
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
s := string(key)
n, err := strconv.ParseInt(s, 10, 64)
if err != nil || reflect.Zero(kt).OverflowInt(n) {
if err != nil || kt.OverflowInt(n) {
d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: kt, Offset: int64(start + 1)})
break
}
kv = reflect.ValueOf(n).Convert(kt)
kv = reflect.New(kt).Elem()
kv.SetInt(n)
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
s := string(key)
n, err := strconv.ParseUint(s, 10, 64)
if err != nil || reflect.Zero(kt).OverflowUint(n) {
if err != nil || kt.OverflowUint(n) {
d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: kt, Offset: int64(start + 1)})
break
}
kv = reflect.ValueOf(n).Convert(kt)
kv = reflect.New(kt).Elem()
kv.SetUint(n)
default:
panic("json: Unexpected key type") // should never occur
}
@@ -852,12 +834,12 @@ func (d *decodeState) convertNumber(s string) (any, error) {
}
f, err := strconv.ParseFloat(s, 64)
if err != nil {
return nil, &UnmarshalTypeError{Value: "number " + s, Type: reflect.TypeOf(0.0), Offset: int64(d.off)}
return nil, &UnmarshalTypeError{Value: "number " + s, Type: reflect.TypeFor[float64](), Offset: int64(d.off)}
}
return f, nil
}
var numberType = reflect.TypeOf(Number(""))
var numberType = reflect.TypeFor[Number]()
// literalStore decodes a literal stored in item into v.
//
@@ -867,7 +849,7 @@ var numberType = reflect.TypeOf(Number(""))
func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool) error {
// Check for unmarshaler.
if len(item) == 0 {
//Empty string given
// Empty string given.
d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
return nil
}
@@ -914,7 +896,7 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
}
switch v.Kind() {
case reflect.Interface, reflect.Pointer, reflect.Map, reflect.Slice:
v.Set(reflect.Zero(v.Type()))
v.SetZero()
// otherwise, ignore null for primitives/string
}
case 't', 'f': // true, false
@@ -966,10 +948,11 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
}
v.SetBytes(b[:n])
case reflect.String:
if v.Type() == numberType && !isValidNumber(string(s)) {
t := string(s)
if v.Type() == numberType && !isValidNumber(t) {
return fmt.Errorf("json: invalid number literal, trying to unmarshal %q into Number", item)
}
v.SetString(string(s))
v.SetString(t)
case reflect.Interface:
if v.NumMethod() == 0 {
v.Set(reflect.ValueOf(string(s)))
@@ -985,13 +968,12 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
}
panic(phasePanicMsg)
}
s := string(item)
switch v.Kind() {
default:
if v.Kind() == reflect.String && v.Type() == numberType {
// s must be a valid number, because it's
// already been tokenized.
v.SetString(s)
v.SetString(string(item))
break
}
if fromQuoted {
@@ -999,7 +981,7 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
}
d.saveError(&UnmarshalTypeError{Value: "number", Type: v.Type(), Offset: int64(d.readIndex())})
case reflect.Interface:
n, err := d.convertNumber(s)
n, err := d.convertNumber(string(item))
if err != nil {
d.saveError(err)
break
@@ -1011,25 +993,25 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool
v.Set(reflect.ValueOf(n))
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
n, err := strconv.ParseInt(s, 10, 64)
n, err := strconv.ParseInt(string(item), 10, 64)
if err != nil || v.OverflowInt(n) {
d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: v.Type(), Offset: int64(d.readIndex())})
d.saveError(&UnmarshalTypeError{Value: "number " + string(item), Type: v.Type(), Offset: int64(d.readIndex())})
break
}
v.SetInt(n)
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
n, err := strconv.ParseUint(s, 10, 64)
n, err := strconv.ParseUint(string(item), 10, 64)
if err != nil || v.OverflowUint(n) {
d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: v.Type(), Offset: int64(d.readIndex())})
d.saveError(&UnmarshalTypeError{Value: "number " + string(item), Type: v.Type(), Offset: int64(d.readIndex())})
break
}
v.SetUint(n)
case reflect.Float32, reflect.Float64:
n, err := strconv.ParseFloat(s, v.Type().Bits())
n, err := strconv.ParseFloat(string(item), v.Type().Bits())
if err != nil || v.OverflowFloat(n) {
d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: v.Type(), Offset: int64(d.readIndex())})
d.saveError(&UnmarshalTypeError{Value: "number " + string(item), Type: v.Type(), Offset: int64(d.readIndex())})
break
}
v.SetFloat(n)
@@ -1201,6 +1183,15 @@ func unquote(s []byte) (t string, ok bool) {
return
}
// unquoteBytes should be an internal detail,
// but widely used packages access it using linkname.
// Notable members of the hall of shame include:
// - github.com/bytedance/sonic
//
// Do not remove or change the type signature.
// See go.dev/issue/67401.
//
//go:linkname unquoteBytes
func unquoteBytes(s []byte) (t []byte, ok bool) {
if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' {
return

File diff suppressed because it is too large.

View File

@@ -12,45 +12,48 @@ package json
import (
"bytes"
"cmp"
"encoding"
"encoding/base64"
"fmt"
"math"
"reflect"
"sort"
"slices"
"strconv"
"strings"
"sync"
"unicode"
"unicode/utf8"
_ "unsafe" // for linkname
)
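The import hunk above drops sort in favour of cmp and slices; a minimal sketch of the generic sorting idiom the go1.23 sources use (the fork's actual call sites are elsewhere in this file):

package main

import (
	"cmp"
	"fmt"
	"slices"
)

func main() {
	keys := []string{"b", "c", "a"}
	// replaces sort.Slice(keys, func(i, j int) bool { return keys[i] < keys[j] })
	slices.SortFunc(keys, func(x, y string) int { return cmp.Compare(x, y) })
	fmt.Println(keys) // [a b c]
}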
// Marshal returns the JSON encoding of v.
//
// Marshal traverses the value v recursively.
// If an encountered value implements the Marshaler interface
// and is not a nil pointer, Marshal calls its MarshalJSON method
// to produce JSON. If no MarshalJSON method is present but the
// value implements encoding.TextMarshaler instead, Marshal calls
// its MarshalText method and encodes the result as a JSON string.
// If an encountered value implements [Marshaler]
// and is not a nil pointer, Marshal calls [Marshaler.MarshalJSON]
// to produce JSON. If no [Marshaler.MarshalJSON] method is present but the
// value implements [encoding.TextMarshaler] instead, Marshal calls
// [encoding.TextMarshaler.MarshalText] and encodes the result as a JSON string.
// The nil pointer exception is not strictly necessary
// but mimics a similar, necessary exception in the behavior of
// UnmarshalJSON.
// [Unmarshaler.UnmarshalJSON].
//
// Otherwise, Marshal uses the following type-dependent default encodings:
//
// Boolean values encode as JSON booleans.
//
// Floating point, integer, and Number values encode as JSON numbers.
// Floating point, integer, and [Number] values encode as JSON numbers.
// NaN and +/-Inf values will return an [UnsupportedValueError].
//
// String values encode as JSON strings coerced to valid UTF-8,
// replacing invalid bytes with the Unicode replacement rune.
// So that the JSON will be safe to embed inside HTML <script> tags,
// the string is encoded using HTMLEscape,
// the string is encoded using [HTMLEscape],
// which replaces "<", ">", "&", U+2028, and U+2029
// with "\u003c", "\u003e", "\u0026", "\u2028", and "\u2029".
// This replacement can be disabled when using an Encoder,
// by calling SetEscapeHTML(false).
// This replacement can be disabled when using an [Encoder],
// by calling [Encoder.SetEscapeHTML](false).
//
// Array and slice values encode as JSON arrays, except that
// []byte encodes as a base64-encoded string, and a nil slice
@@ -107,7 +110,7 @@ import (
// only Unicode letters, digits, and ASCII punctuation except quotation
// marks, backslash, and comma.
//
// Anonymous struct fields are usually marshaled as if their inner exported fields
// Embedded struct fields are usually marshaled as if their inner exported fields
// were fields in the outer struct, subject to the usual Go visibility rules amended
// as described in the next paragraph.
// An anonymous struct field with a name given in its JSON tag is treated as
@@ -134,11 +137,11 @@ import (
// a JSON tag of "-".
//
// Map values encode as JSON objects. The map's key type must either be a
// string, an integer type, or implement encoding.TextMarshaler. The map keys
// string, an integer type, or implement [encoding.TextMarshaler]. The map keys
// are sorted and used as JSON object keys by applying the following rules,
// subject to the UTF-8 coercion described for string values above:
// - keys of any string type are used directly
// - encoding.TextMarshalers are marshaled
// - keys that implement [encoding.TextMarshaler] are marshaled
// - integer keys are converted to strings
//
// Pointer values encode as the value pointed to.
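The map-key rules above (string keys used directly, encoding.TextMarshaler keys marshaled, integer keys converted to strings, then sorted) can be seen with a plain map; a short sketch against the stdlib-compatible API:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	m := map[int]string{3: "c", 1: "a", 2: "b"}
	b, _ := json.Marshal(m)
	// Integer keys become strings and the resulting object keys are sorted.
	fmt.Println(string(b)) // {"1":"a","2":"b","3":"c"}
}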
@@ -149,13 +152,14 @@ import (
//
// Channel, complex, and function values cannot be encoded in JSON.
// Attempting to encode such a value causes Marshal to return
// an UnsupportedTypeError.
// an [UnsupportedTypeError].
//
// JSON cannot represent cyclic data structures and Marshal does not
// handle them. Passing cyclic structures to Marshal will result in
// an error.
func Marshal(v any) ([]byte, error) {
e := newEncodeState()
defer encodeStatePool.Put(e)
err := e.marshal(v, encOpts{escapeHTML: true})
if err != nil {
@@ -163,8 +167,6 @@ func Marshal(v any) ([]byte, error) {
}
buf := append([]byte(nil), e.Bytes()...)
encodeStatePool.Put(e)
return buf, nil
}
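Marshal above now returns the pooled encodeState with a defer and copies the encoded bytes out before the state can be reused. A sketch of the same pool discipline in general form, with a hypothetical render helper and the stdlib encoder standing in for this package's internals:

package main

import (
	"bytes"
	"encoding/json"
	"sync"
)

var bufPool = sync.Pool{New: func() any { return new(bytes.Buffer) }}

// render shows the pattern: defer the Put so error paths also return the
// buffer, and copy the bytes before the pooled object can be handed out again.
func render(v any) ([]byte, error) {
	buf := bufPool.Get().(*bytes.Buffer)
	buf.Reset()
	defer bufPool.Put(buf)

	if err := json.NewEncoder(buf).Encode(v); err != nil {
		return nil, err
	}
	return append([]byte(nil), buf.Bytes()...), nil
}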
@@ -194,7 +196,7 @@ func MarshalSafeCollections(v interface{}, nilSafeSlices bool, nilSafeMaps bool,
}
}
// MarshalIndent is like Marshal but applies Indent to format the output.
// MarshalIndent is like [Marshal] but applies [Indent] to format the output.
// Each JSON element in the output will begin on a new line beginning with prefix
// followed by one or more copies of indent according to the indentation nesting.
func MarshalIndent(v any, prefix, indent string) ([]byte, error) {
@@ -202,47 +204,12 @@ func MarshalIndent(v any, prefix, indent string) ([]byte, error) {
if err != nil {
return nil, err
}
var buf bytes.Buffer
err = Indent(&buf, b, prefix, indent)
b2 := make([]byte, 0, indentGrowthFactor*len(b))
b2, err = appendIndent(b2, b, prefix, indent)
if err != nil {
return nil, err
}
return buf.Bytes(), nil
}
// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
// so that the JSON will be safe to embed inside HTML <script> tags.
// For historical reasons, web browsers don't honor standard HTML
// escaping within <script> tags, so an alternative JSON encoding must
// be used.
func HTMLEscape(dst *bytes.Buffer, src []byte) {
// The characters can only appear in string literals,
// so just scan the string one byte at a time.
start := 0
for i, c := range src {
if c == '<' || c == '>' || c == '&' {
if start < i {
dst.Write(src[start:i])
}
dst.WriteString(`\u00`)
dst.WriteByte(hex[c>>4])
dst.WriteByte(hex[c&0xF])
start = i + 1
}
// Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9).
if c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 {
if start < i {
dst.Write(src[start:i])
}
dst.WriteString(`\u202`)
dst.WriteByte(hex[src[i+2]&0xF])
start = i + 3
}
}
if start < len(src) {
dst.Write(src[start:])
}
return b2, nil
}
// Marshaler is the interface implemented by types that
@@ -251,7 +218,7 @@ type Marshaler interface {
MarshalJSON() ([]byte, error)
}
// An UnsupportedTypeError is returned by Marshal when attempting
// An UnsupportedTypeError is returned by [Marshal] when attempting
// to encode an unsupported value type.
type UnsupportedTypeError struct {
Type reflect.Type
@@ -261,7 +228,7 @@ func (e *UnsupportedTypeError) Error() string {
return "json: unsupported type: " + e.Type.String()
}
// An UnsupportedValueError is returned by Marshal when attempting
// An UnsupportedValueError is returned by [Marshal] when attempting
// to encode an unsupported value.
type UnsupportedValueError struct {
Value reflect.Value
@@ -272,9 +239,9 @@ func (e *UnsupportedValueError) Error() string {
return "json: unsupported value: " + e.Str
}
// Before Go 1.2, an InvalidUTF8Error was returned by Marshal when
// Before Go 1.2, an InvalidUTF8Error was returned by [Marshal] when
// attempting to encode a string value with invalid UTF-8 sequences.
// As of Go 1.2, Marshal instead coerces the string to valid UTF-8 by
// As of Go 1.2, [Marshal] instead coerces the string to valid UTF-8 by
// replacing invalid bytes with the Unicode replacement rune U+FFFD.
//
// Deprecated: No longer used; kept for compatibility.
@@ -286,7 +253,8 @@ func (e *InvalidUTF8Error) Error() string {
return "json: invalid UTF-8 in string: " + strconv.Quote(e.S)
}
// A MarshalerError represents an error from calling a MarshalJSON or MarshalText method.
// A MarshalerError represents an error from calling a
// [Marshaler.MarshalJSON] or [encoding.TextMarshaler.MarshalText] method.
type MarshalerError struct {
Type reflect.Type
Err error
@@ -306,12 +274,11 @@ func (e *MarshalerError) Error() string {
// Unwrap returns the underlying error.
func (e *MarshalerError) Unwrap() error { return e.Err }
var hex = "0123456789abcdef"
const hex = "0123456789abcdef"
// An encodeState encodes JSON into a bytes.Buffer.
type encodeState struct {
bytes.Buffer // accumulated output
scratch [64]byte
// Keep track of what pointers we've seen in the current recursive call
// path, to avoid cycles that could lead to a stack overflow. Only do
@@ -367,16 +334,12 @@ func isEmptyValue(v reflect.Value) bool {
switch v.Kind() {
case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
return v.Len() == 0
case reflect.Bool:
return !v.Bool()
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return v.Int() == 0
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
return v.Uint() == 0
case reflect.Float32, reflect.Float64:
return v.Float() == 0
case reflect.Interface, reflect.Pointer:
return v.IsNil()
case reflect.Bool,
reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr,
reflect.Float32, reflect.Float64,
reflect.Interface, reflect.Pointer:
return v.IsZero()
}
return false
}
@@ -386,7 +349,6 @@ func (e *encodeState) reflectValue(v reflect.Value, opts encOpts) {
if opts.tagkey != nil {
tagkey = *opts.tagkey
}
valueEncoder(v, tagkey)(e, v, opts)
}
@@ -418,7 +380,7 @@ func valueEncoder(v reflect.Value, tagkey string) encoderFunc {
}
func typeEncoder(t reflect.Type, tagkey string) encoderFunc {
if fi, ok := encoderCache.Load(t); ok {
if fi, ok := encoderCache.Load(TagKeyTypeKey{t, tagkey}); ok {
return fi.(encoderFunc)
}
@@ -431,7 +393,7 @@ func typeEncoder(t reflect.Type, tagkey string) encoderFunc {
f encoderFunc
)
wg.Add(1)
fi, loaded := encoderCache.LoadOrStore(t, encoderFunc(func(e *encodeState, v reflect.Value, opts encOpts) {
fi, loaded := encoderCache.LoadOrStore(TagKeyTypeKey{t, tagkey}, encoderFunc(func(e *encodeState, v reflect.Value, opts encOpts) {
wg.Wait()
f(e, v, opts)
}))
@@ -442,13 +404,13 @@ func typeEncoder(t reflect.Type, tagkey string) encoderFunc {
// Compute the real encoder and replace the indirect func with it.
f = newTypeEncoder(t, true, tagkey)
wg.Done()
encoderCache.Store(t, f)
encoderCache.Store(TagKeyTypeKey{t, tagkey}, f)
return f
}
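typeEncoder now keys its sync.Map on TagKeyTypeKey, i.e. on the pair (reflect.Type, tag key), so the same Go type can cache a different encoder per tag key. A minimal sketch of such a composite-key cache, with hypothetical names (cacheKey, lookup) rather than the package's own:

package main

import (
	"fmt"
	"reflect"
	"sync"
)

type cacheKey struct { // stand-in for TagKeyTypeKey
	Type   reflect.Type
	TagKey string
}

var cache sync.Map // map[cacheKey]string

func lookup(t reflect.Type, tagKey string) string {
	if v, ok := cache.Load(cacheKey{t, tagKey}); ok {
		return v.(string)
	}
	v, _ := cache.LoadOrStore(cacheKey{t, tagKey}, fmt.Sprintf("encoder(%v,%s)", t, tagKey))
	return v.(string)
}

func main() {
	t := reflect.TypeOf(struct{ A int }{})
	// The same type gets distinct cache entries for "json" and a custom tag key.
	fmt.Println(lookup(t, "json"))
	fmt.Println(lookup(t, "bson"))
}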
var (
marshalerType = reflect.TypeOf((*Marshaler)(nil)).Elem()
textMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
marshalerType = reflect.TypeFor[Marshaler]()
textMarshalerType = reflect.TypeFor[encoding.TextMarshaler]()
)
// newTypeEncoder constructs an encoderFunc for a type.
@@ -517,8 +479,10 @@ func marshalerEncoder(e *encodeState, v reflect.Value, opts encOpts) {
}
b, err := m.MarshalJSON()
if err == nil {
// copy JSON into buffer, checking validity.
err = compact(&e.Buffer, b, opts.escapeHTML)
e.Grow(len(b))
out := e.AvailableBuffer()
out, err = appendCompact(out, b, opts.escapeHTML)
e.Buffer.Write(out)
}
if err != nil {
e.error(&MarshalerError{v.Type(), err, "MarshalJSON"})
@@ -534,8 +498,10 @@ func addrMarshalerEncoder(e *encodeState, v reflect.Value, opts encOpts) {
m := va.Interface().(Marshaler)
b, err := m.MarshalJSON()
if err == nil {
// copy JSON into buffer, checking validity.
err = compact(&e.Buffer, b, opts.escapeHTML)
e.Grow(len(b))
out := e.AvailableBuffer()
out, err = appendCompact(out, b, opts.escapeHTML)
e.Buffer.Write(out)
}
if err != nil {
e.error(&MarshalerError{v.Type(), err, "MarshalJSON"})
@@ -556,7 +522,7 @@ func textMarshalerEncoder(e *encodeState, v reflect.Value, opts encOpts) {
if err != nil {
e.error(&MarshalerError{v.Type(), err, "MarshalText"})
}
e.stringBytes(b, opts.escapeHTML)
e.Write(appendString(e.AvailableBuffer(), b, opts.escapeHTML))
}
func addrTextMarshalerEncoder(e *encodeState, v reflect.Value, opts encOpts) {
@@ -570,43 +536,31 @@ func addrTextMarshalerEncoder(e *encodeState, v reflect.Value, opts encOpts) {
if err != nil {
e.error(&MarshalerError{v.Type(), err, "MarshalText"})
}
e.stringBytes(b, opts.escapeHTML)
e.Write(appendString(e.AvailableBuffer(), b, opts.escapeHTML))
}
func boolEncoder(e *encodeState, v reflect.Value, opts encOpts) {
if opts.quoted {
e.WriteByte('"')
}
if v.Bool() {
e.WriteString("true")
} else {
e.WriteString("false")
}
if opts.quoted {
e.WriteByte('"')
}
b := e.AvailableBuffer()
b = mayAppendQuote(b, opts.quoted)
b = strconv.AppendBool(b, v.Bool())
b = mayAppendQuote(b, opts.quoted)
e.Write(b)
}
func intEncoder(e *encodeState, v reflect.Value, opts encOpts) {
b := strconv.AppendInt(e.scratch[:0], v.Int(), 10)
if opts.quoted {
e.WriteByte('"')
}
b := e.AvailableBuffer()
b = mayAppendQuote(b, opts.quoted)
b = strconv.AppendInt(b, v.Int(), 10)
b = mayAppendQuote(b, opts.quoted)
e.Write(b)
if opts.quoted {
e.WriteByte('"')
}
}
func uintEncoder(e *encodeState, v reflect.Value, opts encOpts) {
b := strconv.AppendUint(e.scratch[:0], v.Uint(), 10)
if opts.quoted {
e.WriteByte('"')
}
b := e.AvailableBuffer()
b = mayAppendQuote(b, opts.quoted)
b = strconv.AppendUint(b, v.Uint(), 10)
b = mayAppendQuote(b, opts.quoted)
e.Write(b)
if opts.quoted {
e.WriteByte('"')
}
}
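The scalar encoders above switch from a fixed scratch array to appending into bytes.Buffer.AvailableBuffer (Go 1.21+), which skips one copy. A small standalone sketch of that pattern:

package main

import (
	"bytes"
	"fmt"
	"strconv"
)

func main() {
	var buf bytes.Buffer
	// AvailableBuffer returns an empty slice backed by the buffer's spare
	// capacity; appending into it and writing it back avoids a second copy.
	b := buf.AvailableBuffer()
	b = append(b, '"')
	b = strconv.AppendInt(b, 42, 10)
	b = append(b, '"')
	buf.Write(b)
	fmt.Println(buf.String()) // "42"
}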
type floatEncoder int // number of bits
@@ -622,7 +576,8 @@ func (bits floatEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
// See golang.org/issue/6384 and golang.org/issue/14135.
// Like fmt %g, but the exponent cutoffs are different
// and exponents themselves are not padded to two digits.
b := e.scratch[:0]
b := e.AvailableBuffer()
b = mayAppendQuote(b, opts.quoted)
abs := math.Abs(f)
fmt := byte('f')
// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
@@ -640,14 +595,8 @@ func (bits floatEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
b = b[:n-1]
}
}
if opts.quoted {
e.WriteByte('"')
}
b = mayAppendQuote(b, opts.quoted)
e.Write(b)
if opts.quoted {
e.WriteByte('"')
}
}
var (
@@ -666,28 +615,32 @@ func stringEncoder(e *encodeState, v reflect.Value, opts encOpts) {
if !isValidNumber(numStr) {
e.error(fmt.Errorf("json: invalid number literal %q", numStr))
}
if opts.quoted {
e.WriteByte('"')
}
e.WriteString(numStr)
if opts.quoted {
e.WriteByte('"')
}
b := e.AvailableBuffer()
b = mayAppendQuote(b, opts.quoted)
b = append(b, numStr...)
b = mayAppendQuote(b, opts.quoted)
e.Write(b)
return
}
if opts.quoted {
e2 := newEncodeState()
// Since we encode the string twice, we only need to escape HTML
// the first time.
e2.string(v.String(), opts.escapeHTML)
e.stringBytes(e2.Bytes(), false)
encodeStatePool.Put(e2)
b := appendString(nil, v.String(), opts.escapeHTML)
e.Write(appendString(e.AvailableBuffer(), b, false)) // no need to escape again since it is already escaped
} else {
e.string(v.String(), opts.escapeHTML)
e.Write(appendString(e.AvailableBuffer(), v.String(), opts.escapeHTML))
}
}
// isValidNumber reports whether s is a valid JSON number literal.
//
// isValidNumber should be an internal detail,
// but widely used packages access it using linkname.
// Notable members of the hall of shame include:
// - github.com/bytedance/sonic
//
// Do not remove or change the type signature.
// See go.dev/issue/67401.
//
//go:linkname isValidNumber
func isValidNumber(s string) bool {
// This function implements the JSON numbers grammar.
// See https://tools.ietf.org/html/rfc7159#section-6
@@ -764,8 +717,9 @@ type structEncoder struct {
}
type structFields struct {
list []field
nameIndex map[string]int
list []field
byExactName map[string]*field
byFoldedName map[string]*field
}
func (se structEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
@@ -788,7 +742,7 @@ FieldLoop:
if f.omitEmpty && isEmptyValue(fv) {
continue
} else if opts.filter != nil && len(f.jsonfilter) > 0 && !f.jsonfilter.Contains(*opts.filter) {
} else if !matchesJSONFilter(f.jsonfilter, opts.filter) {
continue
}
e.WriteByte(next)
@@ -808,6 +762,25 @@ FieldLoop:
}
}
func matchesJSONFilter(filter jsonfilter, value *string) bool {
if len(filter) == 0 {
return true // field has no jsonfilter tag; always include it
}
if value == nil || *value == "" {
return false // field has a jsonfilter tag, but no filter value was supplied
}
if len(filter) == 1 && filter[0] == "-" {
return false
}
if filter.Contains(*value) {
return true
}
if filter.Contains("*") {
return true
}
return false
}
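matchesJSONFilter is fork-specific: it decides whether a field carrying a jsonfilter tag is emitted for the filter value passed through the encode options. A hypothetical in-package illustration of its cases, assuming jsonfilter is the []string-based type whose Contains method appears later in this diff:

package json

import "fmt"

// exampleFilterMatrix is illustrative only and not part of the change set.
func exampleFilterMatrix() {
	api, empty := "api", ""
	fmt.Println(matchesJSONFilter(nil, &api))                 // true:  field has no jsonfilter tag
	fmt.Println(matchesJSONFilter(jsonfilter{"api"}, nil))    // false: tag present, no filter supplied
	fmt.Println(matchesJSONFilter(jsonfilter{"api"}, &empty)) // false: tag present, empty filter
	fmt.Println(matchesJSONFilter(jsonfilter{"-"}, &api))     // false: "-" hides the field from every filter
	fmt.Println(matchesJSONFilter(jsonfilter{"api"}, &api))   // true:  listed filter matches
	fmt.Println(matchesJSONFilter(jsonfilter{"*"}, &api))     // true:  "*" matches any supplied filter
}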
func newStructEncoder(t reflect.Type, tagkey string) encoderFunc {
se := structEncoder{fields: cachedTypeFields(t, tagkey)}
return se.encode
@@ -839,22 +812,26 @@ func (me mapEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
e.WriteByte('{')
// Extract and sort the keys.
sv := make([]reflectWithString, v.Len())
mi := v.MapRange()
var (
sv = make([]reflectWithString, v.Len())
mi = v.MapRange()
err error
)
for i := 0; mi.Next(); i++ {
sv[i].k = mi.Key()
sv[i].v = mi.Value()
if err := sv[i].resolve(); err != nil {
if sv[i].ks, err = resolveKeyName(mi.Key()); err != nil {
e.error(fmt.Errorf("json: encoding error for type %q: %q", v.Type().String(), err.Error()))
}
sv[i].v = mi.Value()
}
sort.Slice(sv, func(i, j int) bool { return sv[i].ks < sv[j].ks })
slices.SortFunc(sv, func(i, j reflectWithString) int {
return strings.Compare(i.ks, j.ks)
})
for i, kv := range sv {
if i > 0 {
e.WriteByte(',')
}
e.string(kv.ks, opts.escapeHTML)
e.Write(appendString(e.AvailableBuffer(), kv.ks, opts.escapeHTML))
e.WriteByte(':')
me.elemEnc(e, kv.v, opts)
}
@@ -885,29 +862,13 @@ func encodeByteSlice(e *encodeState, v reflect.Value, opts encOpts) {
}
return
}
s := v.Bytes()
e.WriteByte('"')
encodedLen := base64.StdEncoding.EncodedLen(len(s))
if encodedLen <= len(e.scratch) {
// If the encoded bytes fit in e.scratch, avoid an extra
// allocation and use the cheaper Encoding.Encode.
dst := e.scratch[:encodedLen]
base64.StdEncoding.Encode(dst, s)
e.Write(dst)
} else if encodedLen <= 1024 {
// The encoded bytes are short enough to allocate for, and
// Encoding.Encode is still cheaper.
dst := make([]byte, encodedLen)
base64.StdEncoding.Encode(dst, s)
e.Write(dst)
} else {
// The encoded bytes are too long to cheaply allocate, and
// Encoding.Encode is no longer noticeably cheaper.
enc := base64.NewEncoder(base64.StdEncoding, e)
enc.Write(s)
enc.Close()
}
e.WriteByte('"')
b := e.AvailableBuffer()
b = append(b, '"')
b = base64.StdEncoding.AppendEncode(b, s)
b = append(b, '"')
e.Write(b)
}
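encodeByteSlice now uses base64.StdEncoding.AppendEncode (Go 1.22+) straight into the available buffer; the encoded form is unchanged from before. A quick sketch via the public API, using the stdlib import path for illustration:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	b, _ := json.Marshal([]byte("hello"))
	// A []byte still marshals as a base64-encoded JSON string.
	fmt.Println(string(b)) // "aGVsbG8="
}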
// sliceEncoder just wraps an arrayEncoder, checking to make sure the value isn't nil.
@@ -1051,78 +1012,77 @@ func typeByIndex(t reflect.Type, index []int) reflect.Type {
}
type reflectWithString struct {
k reflect.Value
v reflect.Value
ks string
}
func (w *reflectWithString) resolve() error {
if w.k.Kind() == reflect.String {
w.ks = w.k.String()
return nil
func resolveKeyName(k reflect.Value) (string, error) {
if k.Kind() == reflect.String {
return k.String(), nil
}
if tm, ok := w.k.Interface().(encoding.TextMarshaler); ok {
if w.k.Kind() == reflect.Pointer && w.k.IsNil() {
return nil
if tm, ok := k.Interface().(encoding.TextMarshaler); ok {
if k.Kind() == reflect.Pointer && k.IsNil() {
return "", nil
}
buf, err := tm.MarshalText()
w.ks = string(buf)
return err
return string(buf), err
}
switch w.k.Kind() {
switch k.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
w.ks = strconv.FormatInt(w.k.Int(), 10)
return nil
return strconv.FormatInt(k.Int(), 10), nil
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
w.ks = strconv.FormatUint(w.k.Uint(), 10)
return nil
return strconv.FormatUint(k.Uint(), 10), nil
}
panic("unexpected map key type")
}
// NOTE: keep in sync with stringBytes below.
func (e *encodeState) string(s string, escapeHTML bool) {
e.WriteByte('"')
func appendString[Bytes []byte | string](dst []byte, src Bytes, escapeHTML bool) []byte {
dst = append(dst, '"')
start := 0
for i := 0; i < len(s); {
if b := s[i]; b < utf8.RuneSelf {
for i := 0; i < len(src); {
if b := src[i]; b < utf8.RuneSelf {
if htmlSafeSet[b] || (!escapeHTML && safeSet[b]) {
i++
continue
}
if start < i {
e.WriteString(s[start:i])
}
e.WriteByte('\\')
dst = append(dst, src[start:i]...)
switch b {
case '\\', '"':
e.WriteByte(b)
dst = append(dst, '\\', b)
case '\b':
dst = append(dst, '\\', 'b')
case '\f':
dst = append(dst, '\\', 'f')
case '\n':
e.WriteByte('n')
dst = append(dst, '\\', 'n')
case '\r':
e.WriteByte('r')
dst = append(dst, '\\', 'r')
case '\t':
e.WriteByte('t')
dst = append(dst, '\\', 't')
default:
// This encodes bytes < 0x20 except for \t, \n and \r.
// This encodes bytes < 0x20 except for \b, \f, \n, \r and \t.
// If escapeHTML is set, it also escapes <, >, and &
// because they can lead to security holes when
// user-controlled strings are rendered into JSON
// and served to some browsers.
e.WriteString(`u00`)
e.WriteByte(hex[b>>4])
e.WriteByte(hex[b&0xF])
dst = append(dst, '\\', 'u', '0', '0', hex[b>>4], hex[b&0xF])
}
i++
start = i
continue
}
c, size := utf8.DecodeRuneInString(s[i:])
// TODO(https://go.dev/issue/56948): Use generic utf8 functionality.
// For now, cast only a small portion of byte slices to a string
// so that it can be stack allocated. This slows down []byte slightly
// due to the extra copy, but keeps string performance roughly the same.
n := len(src) - i
if n > utf8.UTFMax {
n = utf8.UTFMax
}
c, size := utf8.DecodeRuneInString(string(src[i : i+n]))
if c == utf8.RuneError && size == 1 {
if start < i {
e.WriteString(s[start:i])
}
e.WriteString(`\ufffd`)
dst = append(dst, src[start:i]...)
dst = append(dst, `\ufffd`...)
i += size
start = i
continue
@@ -1133,102 +1093,25 @@ func (e *encodeState) string(s string, escapeHTML bool) {
// but don't work in JSONP, which has to be evaluated as JavaScript,
// and can lead to security holes there. It is valid JSON to
// escape them, so we do so unconditionally.
// See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
// See https://en.wikipedia.org/wiki/JSON#Safety.
if c == '\u2028' || c == '\u2029' {
if start < i {
e.WriteString(s[start:i])
}
e.WriteString(`\u202`)
e.WriteByte(hex[c&0xF])
dst = append(dst, src[start:i]...)
dst = append(dst, '\\', 'u', '2', '0', '2', hex[c&0xF])
i += size
start = i
continue
}
i += size
}
if start < len(s) {
e.WriteString(s[start:])
}
e.WriteByte('"')
}
// NOTE: keep in sync with string above.
func (e *encodeState) stringBytes(s []byte, escapeHTML bool) {
e.WriteByte('"')
start := 0
for i := 0; i < len(s); {
if b := s[i]; b < utf8.RuneSelf {
if htmlSafeSet[b] || (!escapeHTML && safeSet[b]) {
i++
continue
}
if start < i {
e.Write(s[start:i])
}
e.WriteByte('\\')
switch b {
case '\\', '"':
e.WriteByte(b)
case '\n':
e.WriteByte('n')
case '\r':
e.WriteByte('r')
case '\t':
e.WriteByte('t')
default:
// This encodes bytes < 0x20 except for \t, \n and \r.
// If escapeHTML is set, it also escapes <, >, and &
// because they can lead to security holes when
// user-controlled strings are rendered into JSON
// and served to some browsers.
e.WriteString(`u00`)
e.WriteByte(hex[b>>4])
e.WriteByte(hex[b&0xF])
}
i++
start = i
continue
}
c, size := utf8.DecodeRune(s[i:])
if c == utf8.RuneError && size == 1 {
if start < i {
e.Write(s[start:i])
}
e.WriteString(`\ufffd`)
i += size
start = i
continue
}
// U+2028 is LINE SEPARATOR.
// U+2029 is PARAGRAPH SEPARATOR.
// They are both technically valid characters in JSON strings,
// but don't work in JSONP, which has to be evaluated as JavaScript,
// and can lead to security holes there. It is valid JSON to
// escape them, so we do so unconditionally.
// See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
if c == '\u2028' || c == '\u2029' {
if start < i {
e.Write(s[start:i])
}
e.WriteString(`\u202`)
e.WriteByte(hex[c&0xF])
i += size
start = i
continue
}
i += size
}
if start < len(s) {
e.Write(s[start:])
}
e.WriteByte('"')
dst = append(dst, src[start:]...)
dst = append(dst, '"')
return dst
}
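appendString above now emits the short escapes \b and \f alongside \n, \r and \t, keeps escaping U+2028 and U+2029 unconditionally, and HTML-escapes <, >, and & unless that is turned off. A small sketch of the resulting output through the public API (stdlib path shown for illustration):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	b, _ := json.Marshal("a\b\f\n\u2028<")
	// Control characters use short escapes where one exists, U+2028 is always
	// escaped, and '<' becomes \u003c because HTML escaping is on by default.
	fmt.Println(string(b)) // "a\b\f\n\u2028\u003c"
}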
// A field represents a single field found in a struct.
type field struct {
name string
nameBytes []byte // []byte(name)
equalFold func(s, t []byte) bool // bytes.EqualFold or equivalent
nameBytes []byte // []byte(name)
nameNonEsc string // `"` + name + `":`
nameEscHTML string // `"` + HTMLEscape(name) + `":`
@@ -1255,28 +1138,19 @@ func (j jsonfilter) Contains(t string) bool {
return false
}
// byIndex sorts field by index sequence.
type byIndex []field
func (x byIndex) Len() int { return len(x) }
func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
func (x byIndex) Less(i, j int) bool {
for k, xik := range x[i].index {
if k >= len(x[j].index) {
return false
}
if xik != x[j].index[k] {
return xik < x[j].index[k]
}
}
return len(x[i].index) < len(x[j].index)
}
// typeFields returns a list of fields that JSON should recognize for the given type.
// The algorithm is breadth-first search over the set of structs to include - the top struct
// and then any reachable anonymous structs.
//
// typeFields should be an internal detail,
// but widely used packages access it using linkname.
// Notable members of the hall of shame include:
// - github.com/bytedance/sonic
//
// Do not remove or change the type signature.
// See go.dev/issue/67401.
//
//go:linkname typeFields
func typeFields(t reflect.Type, tagkey string) structFields {
// Anonymous fields to explore at the current level and the next.
current := []field{}
@@ -1291,8 +1165,8 @@ func typeFields(t reflect.Type, tagkey string) structFields {
// Fields found.
var fields []field
// Buffer to run HTMLEscape on field names.
var nameEscBuf bytes.Buffer
// Buffer to run appendHTMLEscape on field names.
var nameEscBuf []byte
for len(next) > 0 {
current, next = next, current[:0]
@@ -1331,10 +1205,10 @@ func typeFields(t reflect.Type, tagkey string) structFields {
name = ""
}
var jsonfilter []string
var jsonfilterVal []string
jsonfilterTag := sf.Tag.Get("jsonfilter")
if jsonfilterTag != "" && jsonfilterTag != "-" {
jsonfilter = strings.Split(jsonfilterTag, ",")
if jsonfilterTag != "" {
jsonfilterVal = strings.Split(jsonfilterTag, ",")
}
index := make([]int, len(f.index)+1)
@@ -1372,25 +1246,21 @@ func typeFields(t reflect.Type, tagkey string) structFields {
index: index,
typ: ft,
omitEmpty: opts.Contains("omitempty"),
jsonfilter: jsonfilter,
jsonfilter: jsonfilterVal,
quoted: quoted,
}
field.nameBytes = []byte(field.name)
field.equalFold = foldFunc(field.nameBytes)
// Build nameEscHTML and nameNonEsc ahead of time.
nameEscBuf.Reset()
nameEscBuf.WriteString(`"`)
HTMLEscape(&nameEscBuf, field.nameBytes)
nameEscBuf.WriteString(`":`)
field.nameEscHTML = nameEscBuf.String()
nameEscBuf = appendHTMLEscape(nameEscBuf[:0], field.nameBytes)
field.nameEscHTML = `"` + string(nameEscBuf) + `":`
field.nameNonEsc = `"` + field.name + `":`
fields = append(fields, field)
if count[f.typ] > 1 {
// If there were multiple instances, add a second,
// so that the annihilation code will see a duplicate.
// It only cares about the distinction between 1 or 2,
// It only cares about the distinction between 1 and 2,
// so don't bother generating any more copies.
fields = append(fields, fields[len(fields)-1])
}
@@ -1406,21 +1276,23 @@ func typeFields(t reflect.Type, tagkey string) structFields {
}
}
sort.Slice(fields, func(i, j int) bool {
x := fields
slices.SortFunc(fields, func(a, b field) int {
// sort field by name, breaking ties with depth, then
// breaking ties with "name came from json tag", then
// breaking ties with index sequence.
if x[i].name != x[j].name {
return x[i].name < x[j].name
if c := strings.Compare(a.name, b.name); c != 0 {
return c
}
if len(x[i].index) != len(x[j].index) {
return len(x[i].index) < len(x[j].index)
if c := cmp.Compare(len(a.index), len(b.index)); c != 0 {
return c
}
if x[i].tag != x[j].tag {
return x[i].tag
if a.tag != b.tag {
if a.tag {
return -1
}
return +1
}
return byIndex(x).Less(i, j)
return slices.Compare(a.index, b.index)
})
// Delete all fields that are hidden by the Go rules for embedded fields,
@@ -1452,17 +1324,24 @@ func typeFields(t reflect.Type, tagkey string) structFields {
}
fields = out
sort.Sort(byIndex(fields))
slices.SortFunc(fields, func(i, j field) int {
return slices.Compare(i.index, j.index)
})
for i := range fields {
f := &fields[i]
f.encoder = typeEncoder(typeByIndex(t, f.index), tagkey)
}
nameIndex := make(map[string]int, len(fields))
exactNameIndex := make(map[string]*field, len(fields))
foldedNameIndex := make(map[string]*field, len(fields))
for i, field := range fields {
nameIndex[field.name] = i
exactNameIndex[field.name] = &fields[i]
// For historical reasons, first folded match takes precedence.
if _, ok := foldedNameIndex[string(foldName(field.nameBytes))]; !ok {
foldedNameIndex[string(foldName(field.nameBytes))] = &fields[i]
}
}
return structFields{fields, nameIndex}
return structFields{fields, exactNameIndex, foldedNameIndex}
}
// dominantField looks through the fields, all of which are known to
@@ -1481,26 +1360,25 @@ func dominantField(fields []field) (field, bool) {
return fields[0], true
}
var fieldCache sync.Map // map[string]map[reflect.Type]structFields
var fieldCache sync.Map // map[TagKeyTypeKey]structFields
// cachedTypeFields is like typeFields but uses a cache to avoid repeated work.
func cachedTypeFields(t reflect.Type, tagkey string) structFields {
if m0, ok := fieldCache.Load(tagkey); ok {
if f, ok := m0.(*sync.Map).Load(t); ok {
return f.(structFields)
}
f, _ := m0.(*sync.Map).LoadOrStore(t, typeFields(t, tagkey))
return f.(structFields)
} else {
m0 := &sync.Map{}
f, _ := m0.LoadOrStore(t, typeFields(t, tagkey))
fieldCache.Store(tagkey, m0)
if f, ok := fieldCache.Load(TagKeyTypeKey{t, tagkey}); ok {
return f.(structFields)
}
f, _ := fieldCache.LoadOrStore(TagKeyTypeKey{t, tagkey}, typeFields(t, tagkey))
return f.(structFields)
}
func mayAppendQuote(b []byte, quoted bool) []byte {
if quoted {
b = append(b, '"')
}
return b
}
type TagKeyTypeKey struct {
Type reflect.Type
TagKey string
}

File diff suppressed because it is too large

View File

@@ -5,140 +5,44 @@
package json
import (
"bytes"
"unicode"
"unicode/utf8"
)
const (
caseMask = ^byte(0x20) // Mask to ignore case in ASCII.
kelvin = '\u212a'
smallLongEss = '\u017f'
)
// foldFunc returns one of four different case folding equivalence
// functions, from most general (and slow) to fastest:
//
// 1) bytes.EqualFold, if the key s contains any non-ASCII UTF-8
// 2) equalFoldRight, if s contains special folding ASCII ('k', 'K', 's', 'S')
// 3) asciiEqualFold, no special, but includes non-letters (including _)
// 4) simpleLetterEqualFold, no specials, no non-letters.
//
// The letters S and K are special because they map to 3 runes, not just 2:
// - S maps to s and to U+017F 'ſ' Latin small letter long s
// - k maps to K and to U+212A 'K' Kelvin sign
//
// See https://play.golang.org/p/tTxjOc0OGo
//
// The returned function is specialized for matching against s and
// should only be given s. It's not curried for performance reasons.
func foldFunc(s []byte) func(s, t []byte) bool {
nonLetter := false
special := false // special letter
for _, b := range s {
if b >= utf8.RuneSelf {
return bytes.EqualFold
}
upper := b & caseMask
if upper < 'A' || upper > 'Z' {
nonLetter = true
} else if upper == 'K' || upper == 'S' {
// See above for why these letters are special.
special = true
}
}
if special {
return equalFoldRight
}
if nonLetter {
return asciiEqualFold
}
return simpleLetterEqualFold
// foldName returns a folded string such that foldName(x) == foldName(y)
// is identical to bytes.EqualFold(x, y).
func foldName(in []byte) []byte {
// This is inlinable to take advantage of "function outlining".
var arr [32]byte // large enough for most JSON names
return appendFoldedName(arr[:0], in)
}
// equalFoldRight is a specialization of bytes.EqualFold when s is
// known to be all ASCII (including punctuation), but contains an 's',
// 'S', 'k', or 'K', requiring a Unicode fold on the bytes in t.
// See comments on foldFunc.
func equalFoldRight(s, t []byte) bool {
for _, sb := range s {
if len(t) == 0 {
return false
}
tb := t[0]
if tb < utf8.RuneSelf {
if sb != tb {
sbUpper := sb & caseMask
if 'A' <= sbUpper && sbUpper <= 'Z' {
if sbUpper != tb&caseMask {
return false
}
} else {
return false
}
func appendFoldedName(out, in []byte) []byte {
for i := 0; i < len(in); {
// Handle single-byte ASCII.
if c := in[i]; c < utf8.RuneSelf {
if 'a' <= c && c <= 'z' {
c -= 'a' - 'A'
}
t = t[1:]
out = append(out, c)
i++
continue
}
// sb is ASCII and t is not. t must be either kelvin
// sign or long s; sb must be s, S, k, or K.
tr, size := utf8.DecodeRune(t)
switch sb {
case 's', 'S':
if tr != smallLongEss {
return false
}
case 'k', 'K':
if tr != kelvin {
return false
}
default:
return false
}
t = t[size:]
// Handle multi-byte Unicode.
r, n := utf8.DecodeRune(in[i:])
out = utf8.AppendRune(out, foldRune(r))
i += n
}
if len(t) > 0 {
return false
}
return true
return out
}
// asciiEqualFold is a specialization of bytes.EqualFold for use when
// s is all ASCII (but may contain non-letters) and contains no
// special-folding letters.
// See comments on foldFunc.
func asciiEqualFold(s, t []byte) bool {
if len(s) != len(t) {
return false
}
for i, sb := range s {
tb := t[i]
if sb == tb {
continue
}
if ('a' <= sb && sb <= 'z') || ('A' <= sb && sb <= 'Z') {
if sb&caseMask != tb&caseMask {
return false
}
} else {
return false
// foldRune returns the smallest rune for all runes in the same fold set.
func foldRune(r rune) rune {
for {
r2 := unicode.SimpleFold(r)
if r2 <= r {
return r2
}
r = r2
}
return true
}
// simpleLetterEqualFold is a specialization of bytes.EqualFold for
// use when s is all ASCII letters (no underscores, etc) and also
// doesn't contain 'k', 'K', 's', or 'S'.
// See comments on foldFunc.
func simpleLetterEqualFold(s, t []byte) bool {
if len(s) != len(t) {
return false
}
for i, b := range s {
if b&caseMask != t[i]&caseMask {
return false
}
}
return true
}
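The new fold.go reduces case-insensitive field matching to a single primitive: foldName maps a name to a canonical form, so two names match case-insensitively exactly when their folded forms are byte-equal (the property the fuzz test below checks against bytes.EqualFold). A sketch that mirrors foldRune and appendFoldedName above, minus the ASCII fast path and the fixed 32-byte array:

package main

import (
	"bytes"
	"fmt"
	"unicode"
	"unicode/utf8"
)

// foldRune walks the SimpleFold cycle down to its smallest member.
func foldRune(r rune) rune {
	for {
		r2 := unicode.SimpleFold(r)
		if r2 <= r {
			return r2
		}
		r = r2
	}
}

func foldedName(in []byte) []byte {
	var out []byte
	for i := 0; i < len(in); {
		r, n := utf8.DecodeRune(in[i:])
		out = utf8.AppendRune(out, foldRune(r))
		i += n
	}
	return out
}

func main() {
	x, y := []byte("aes_key"), []byte("AES_KEY")
	fmt.Println(bytes.Equal(foldedName(x), foldedName(y))) // true
	fmt.Println(bytes.EqualFold(x, y))                     // true: same verdict
}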

View File

@@ -6,111 +6,45 @@ package json
import (
"bytes"
"strings"
"testing"
"unicode/utf8"
)
var foldTests = []struct {
fn func(s, t []byte) bool
s, t string
want bool
}{
{equalFoldRight, "", "", true},
{equalFoldRight, "a", "a", true},
{equalFoldRight, "", "a", false},
{equalFoldRight, "a", "", false},
{equalFoldRight, "a", "A", true},
{equalFoldRight, "AB", "ab", true},
{equalFoldRight, "AB", "ac", false},
{equalFoldRight, "sbkKc", "ſbKc", true},
{equalFoldRight, "SbKkc", "ſbKc", true},
{equalFoldRight, "SbKkc", "ſbKK", false},
{equalFoldRight, "e", "é", false},
{equalFoldRight, "s", "S", true},
{simpleLetterEqualFold, "", "", true},
{simpleLetterEqualFold, "abc", "abc", true},
{simpleLetterEqualFold, "abc", "ABC", true},
{simpleLetterEqualFold, "abc", "ABCD", false},
{simpleLetterEqualFold, "abc", "xxx", false},
{asciiEqualFold, "a_B", "A_b", true},
{asciiEqualFold, "aa@", "aa`", false}, // verify 0x40 and 0x60 aren't case-equivalent
}
func TestFold(t *testing.T) {
for i, tt := range foldTests {
if got := tt.fn([]byte(tt.s), []byte(tt.t)); got != tt.want {
t.Errorf("%d. %q, %q = %v; want %v", i, tt.s, tt.t, got, tt.want)
func FuzzEqualFold(f *testing.F) {
for _, ss := range [][2]string{
{"", ""},
{"123abc", "123ABC"},
{"αβδ", "ΑΒΔ"},
{"abc", "xyz"},
{"abc", "XYZ"},
{"1", "2"},
{"hello, world!", "hello, world!"},
{"hello, world!", "Hello, World!"},
{"hello, world!", "HELLO, WORLD!"},
{"hello, world!", "jello, world!"},
{"γειά, κόσμε!", "γειά, κόσμε!"},
{"γειά, κόσμε!", "Γειά, Κόσμε!"},
{"γειά, κόσμε!", "ΓΕΙΆ, ΚΌΣΜΕ!"},
{"γειά, κόσμε!", "ΛΕΙΆ, ΚΌΣΜΕ!"},
{"AESKey", "aesKey"},
{"AESKEY", "aes_key"},
{"aes_key", "AES_KEY"},
{"AES_KEY", "aes-key"},
{"aes-key", "AES-KEY"},
{"AES-KEY", "aesKey"},
{"aesKey", "AesKey"},
{"AesKey", "AESKey"},
{"AESKey", "aeskey"},
{"DESKey", "aeskey"},
{"AES Key", "aeskey"},
} {
f.Add([]byte(ss[0]), []byte(ss[1]))
}
equalFold := func(x, y []byte) bool { return string(foldName(x)) == string(foldName(y)) }
f.Fuzz(func(t *testing.T, x, y []byte) {
got := equalFold(x, y)
want := bytes.EqualFold(x, y)
if got != want {
t.Errorf("equalFold(%q, %q) = %v, want %v", x, y, got, want)
}
truth := strings.EqualFold(tt.s, tt.t)
if truth != tt.want {
t.Errorf("strings.EqualFold doesn't agree with case %d", i)
}
}
}
func TestFoldAgainstUnicode(t *testing.T) {
const bufSize = 5
buf1 := make([]byte, 0, bufSize)
buf2 := make([]byte, 0, bufSize)
var runes []rune
for i := 0x20; i <= 0x7f; i++ {
runes = append(runes, rune(i))
}
runes = append(runes, kelvin, smallLongEss)
funcs := []struct {
name string
fold func(s, t []byte) bool
letter bool // must be ASCII letter
simple bool // must be simple ASCII letter (not 'S' or 'K')
}{
{
name: "equalFoldRight",
fold: equalFoldRight,
},
{
name: "asciiEqualFold",
fold: asciiEqualFold,
simple: true,
},
{
name: "simpleLetterEqualFold",
fold: simpleLetterEqualFold,
simple: true,
letter: true,
},
}
for _, ff := range funcs {
for _, r := range runes {
if r >= utf8.RuneSelf {
continue
}
if ff.letter && !isASCIILetter(byte(r)) {
continue
}
if ff.simple && (r == 's' || r == 'S' || r == 'k' || r == 'K') {
continue
}
for _, r2 := range runes {
buf1 := append(buf1[:0], 'x')
buf2 := append(buf2[:0], 'x')
buf1 = buf1[:1+utf8.EncodeRune(buf1[1:bufSize], r)]
buf2 = buf2[:1+utf8.EncodeRune(buf2[1:bufSize], r2)]
buf1 = append(buf1, 'x')
buf2 = append(buf2, 'x')
want := bytes.EqualFold(buf1, buf2)
if got := ff.fold(buf1, buf2); got != want {
t.Errorf("%s(%q, %q) = %v; want %v", ff.name, buf1, buf2, got, want)
}
}
}
}
}
func isASCIILetter(b byte) bool {
return ('A' <= b && b <= 'Z') || ('a' <= b && b <= 'z')
})
}

View File

@@ -1,42 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build gofuzz
package json
import (
"fmt"
)
func Fuzz(data []byte) (score int) {
for _, ctor := range []func() any{
func() any { return new(any) },
func() any { return new(map[string]any) },
func() any { return new([]any) },
} {
v := ctor()
err := Unmarshal(data, v)
if err != nil {
continue
}
score = 1
m, err := Marshal(v)
if err != nil {
fmt.Printf("v=%#v\n", v)
panic(err)
}
u := ctor()
err = Unmarshal(m, u)
if err != nil {
fmt.Printf("v=%#v\n", v)
fmt.Printf("m=%s\n", m)
panic(err)
}
}
return
}

View File

@@ -25,7 +25,6 @@ func (r GoJsonRender) Render(w http.ResponseWriter) error {
if val := header["Content-Type"]; len(val) == 0 {
header["Content-Type"] = []string{"application/json; charset=utf-8"}
}
jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent, r.Filter)
if err != nil {
panic(err)

View File

@@ -4,38 +4,67 @@
package json
import (
"bytes"
)
import "bytes"
// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
// so that the JSON will be safe to embed inside HTML <script> tags.
// For historical reasons, web browsers don't honor standard HTML
// escaping within <script> tags, so an alternative JSON encoding must be used.
func HTMLEscape(dst *bytes.Buffer, src []byte) {
dst.Grow(len(src))
dst.Write(appendHTMLEscape(dst.AvailableBuffer(), src))
}
func appendHTMLEscape(dst, src []byte) []byte {
// The characters can only appear in string literals,
// so just scan the string one byte at a time.
start := 0
for i, c := range src {
if c == '<' || c == '>' || c == '&' {
dst = append(dst, src[start:i]...)
dst = append(dst, '\\', 'u', '0', '0', hex[c>>4], hex[c&0xF])
start = i + 1
}
// Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9).
if c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 {
dst = append(dst, src[start:i]...)
dst = append(dst, '\\', 'u', '2', '0', '2', hex[src[i+2]&0xF])
start = i + len("\u2029")
}
}
return append(dst, src[start:]...)
}
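HTMLEscape keeps its bytes.Buffer signature but now grows the destination and appends through appendHTMLEscape; the observable output is unchanged. A quick usage sketch (stdlib import path shown for illustration):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func main() {
	var dst bytes.Buffer
	json.HTMLEscape(&dst, []byte(`{"msg":"a<b&c>d"}`))
	// <, > and & inside string literals are rewritten to \u003c, \u003e, \u0026.
	fmt.Println(dst.String()) // {"msg":"a\u003cb\u0026c\u003ed"}
}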
// Compact appends to dst the JSON-encoded src with
// insignificant space characters elided.
func Compact(dst *bytes.Buffer, src []byte) error {
return compact(dst, src, false)
dst.Grow(len(src))
b := dst.AvailableBuffer()
b, err := appendCompact(b, src, false)
dst.Write(b)
return err
}
func compact(dst *bytes.Buffer, src []byte, escape bool) error {
origLen := dst.Len()
func appendCompact(dst, src []byte, escape bool) ([]byte, error) {
origLen := len(dst)
scan := newScanner()
defer freeScanner(scan)
start := 0
for i, c := range src {
if escape && (c == '<' || c == '>' || c == '&') {
if start < i {
dst.Write(src[start:i])
dst = append(dst, src[start:i]...)
}
dst.WriteString(`\u00`)
dst.WriteByte(hex[c>>4])
dst.WriteByte(hex[c&0xF])
dst = append(dst, '\\', 'u', '0', '0', hex[c>>4], hex[c&0xF])
start = i + 1
}
// Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9).
if escape && c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 {
if start < i {
dst.Write(src[start:i])
dst = append(dst, src[start:i]...)
}
dst.WriteString(`\u202`)
dst.WriteByte(hex[src[i+2]&0xF])
dst = append(dst, '\\', 'u', '2', '0', '2', hex[src[i+2]&0xF])
start = i + 3
}
v := scan.step(scan, c)
@@ -44,29 +73,37 @@ func compact(dst *bytes.Buffer, src []byte, escape bool) error {
break
}
if start < i {
dst.Write(src[start:i])
dst = append(dst, src[start:i]...)
}
start = i + 1
}
}
if scan.eof() == scanError {
dst.Truncate(origLen)
return scan.err
return dst[:origLen], scan.err
}
if start < len(src) {
dst.Write(src[start:])
dst = append(dst, src[start:]...)
}
return nil
return dst, nil
}
func newline(dst *bytes.Buffer, prefix, indent string, depth int) {
dst.WriteByte('\n')
dst.WriteString(prefix)
func appendNewline(dst []byte, prefix, indent string, depth int) []byte {
dst = append(dst, '\n')
dst = append(dst, prefix...)
for i := 0; i < depth; i++ {
dst.WriteString(indent)
dst = append(dst, indent...)
}
return dst
}
// indentGrowthFactor specifies the growth factor of indenting JSON input.
// Empirically, the growth factor was measured to be between 1.4x and 1.8x
// for some set of compacted JSON with the indent being a single tab.
// Specify a growth factor slightly larger than what is observed
// to reduce probability of allocation in appendIndent.
// A factor no higher than 2 ensures that wasted space never exceeds 50%.
const indentGrowthFactor = 2
// Indent appends to dst an indented form of the JSON-encoded src.
// Each element in a JSON object or array begins on a new,
// indented line beginning with prefix followed by one or more
@@ -79,7 +116,15 @@ func newline(dst *bytes.Buffer, prefix, indent string, depth int) {
// For example, if src has no trailing spaces, neither will dst;
// if src ends in a trailing newline, so will dst.
func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
origLen := dst.Len()
dst.Grow(indentGrowthFactor * len(src))
b := dst.AvailableBuffer()
b, err := appendIndent(b, src, prefix, indent)
dst.Write(b)
return err
}
func appendIndent(dst, src []byte, prefix, indent string) ([]byte, error) {
origLen := len(dst)
scan := newScanner()
defer freeScanner(scan)
needIndent := false
@@ -96,13 +141,13 @@ func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
if needIndent && v != scanEndObject && v != scanEndArray {
needIndent = false
depth++
newline(dst, prefix, indent, depth)
dst = appendNewline(dst, prefix, indent, depth)
}
// Emit semantically uninteresting bytes
// (in particular, punctuation in strings) unmodified.
if v == scanContinue {
dst.WriteByte(c)
dst = append(dst, c)
continue
}
@@ -111,33 +156,27 @@ func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
case '{', '[':
// delay indent so that empty object and array are formatted as {} and [].
needIndent = true
dst.WriteByte(c)
dst = append(dst, c)
case ',':
dst.WriteByte(c)
newline(dst, prefix, indent, depth)
dst = append(dst, c)
dst = appendNewline(dst, prefix, indent, depth)
case ':':
dst.WriteByte(c)
dst.WriteByte(' ')
dst = append(dst, c, ' ')
case '}', ']':
if needIndent {
// suppress indent in empty object/array
needIndent = false
} else {
depth--
newline(dst, prefix, indent, depth)
dst = appendNewline(dst, prefix, indent, depth)
}
dst.WriteByte(c)
dst = append(dst, c)
default:
dst.WriteByte(c)
dst = append(dst, c)
}
}
if scan.eof() == scanError {
dst.Truncate(origLen)
return scan.err
return dst[:origLen], scan.err
}
return nil
return dst, nil
}
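Compact and Indent are now append-based internally but keep their bytes.Buffer signatures, and the round-trip property checked by TestCompactBig and TestIndentBig further down still holds. A short sketch (stdlib path for illustration):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func main() {
	src := []byte(`{"a":[1,2,3],"b":{"c":true}}`)

	var indented bytes.Buffer
	_ = json.Indent(&indented, src, "", "\t")

	var compacted bytes.Buffer
	_ = json.Compact(&compacted, indented.Bytes())

	// Compacting the indented form recovers the original compact input.
	fmt.Println(bytes.Equal(compacted.Bytes(), src)) // true
}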

View File

@@ -116,18 +116,3 @@ func TestNumberIsValid(t *testing.T) {
}
}
}
func BenchmarkNumberIsValid(b *testing.B) {
s := "-61657.61667E+61673"
for i := 0; i < b.N; i++ {
isValidNumber(s)
}
}
func BenchmarkNumberIsValidRegexp(b *testing.B) {
var jsonNumberRegexp = regexp.MustCompile(`^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$`)
s := "-61657.61667E+61673"
for i := 0; i < b.N; i++ {
jsonNumberRegexp.MatchString(s)
}
}

View File

@@ -43,7 +43,7 @@ func checkValid(data []byte, scan *scanner) error {
}
// A SyntaxError is a description of a JSON syntax error.
// Unmarshal will return a SyntaxError if the JSON can't be parsed.
// [Unmarshal] will return a SyntaxError if the JSON can't be parsed.
type SyntaxError struct {
msg string // description of error
Offset int64 // error occurred after reading Offset bytes
@@ -594,7 +594,7 @@ func (s *scanner) error(c byte, context string) int {
return scanError
}
// quoteChar formats c as a quoted character literal
// quoteChar formats c as a quoted character literal.
func quoteChar(c byte) string {
// special cases - different from quoted strings
if c == '\'' {

View File

@@ -9,51 +9,59 @@ import (
"math"
"math/rand"
"reflect"
"strings"
"testing"
)
var validTests = []struct {
data string
ok bool
}{
{`foo`, false},
{`}{`, false},
{`{]`, false},
{`{}`, true},
{`{"foo":"bar"}`, true},
{`{"foo":"bar","bar":{"baz":["qux"]}}`, true},
func indentNewlines(s string) string {
return strings.Join(strings.Split(s, "\n"), "\n\t")
}
func stripWhitespace(s string) string {
return strings.Map(func(r rune) rune {
if r == ' ' || r == '\n' || r == '\r' || r == '\t' {
return -1
}
return r
}, s)
}
func TestValid(t *testing.T) {
for _, tt := range validTests {
if ok := Valid([]byte(tt.data)); ok != tt.ok {
t.Errorf("Valid(%#q) = %v, want %v", tt.data, ok, tt.ok)
}
tests := []struct {
CaseName
data string
ok bool
}{
{Name(""), `foo`, false},
{Name(""), `}{`, false},
{Name(""), `{]`, false},
{Name(""), `{}`, true},
{Name(""), `{"foo":"bar"}`, true},
{Name(""), `{"foo":"bar","bar":{"baz":["qux"]}}`, true},
}
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
if ok := Valid([]byte(tt.data)); ok != tt.ok {
t.Errorf("%s: Valid(`%s`) = %v, want %v", tt.Where, tt.data, ok, tt.ok)
}
})
}
}
// Tests of simple examples.
type example struct {
compact string
indent string
}
var examples = []example{
{`1`, `1`},
{`{}`, `{}`},
{`[]`, `[]`},
{`{"":2}`, "{\n\t\"\": 2\n}"},
{`[3]`, "[\n\t3\n]"},
{`[1,2,3]`, "[\n\t1,\n\t2,\n\t3\n]"},
{`{"x":1}`, "{\n\t\"x\": 1\n}"},
{ex1, ex1i},
{"{\"\":\"<>&\u2028\u2029\"}", "{\n\t\"\": \"<>&\u2028\u2029\"\n}"}, // See golang.org/issue/34070
}
var ex1 = `[true,false,null,"x",1,1.5,0,-5e+2]`
var ex1i = `[
func TestCompactAndIndent(t *testing.T) {
tests := []struct {
CaseName
compact string
indent string
}{
{Name(""), `1`, `1`},
{Name(""), `{}`, `{}`},
{Name(""), `[]`, `[]`},
{Name(""), `{"":2}`, "{\n\t\"\": 2\n}"},
{Name(""), `[3]`, "[\n\t3\n]"},
{Name(""), `[1,2,3]`, "[\n\t1,\n\t2,\n\t3\n]"},
{Name(""), `{"x":1}`, "{\n\t\"x\": 1\n}"},
{Name(""), `[true,false,null,"x",1,1.5,0,-5e+2]`, `[
true,
false,
null,
@@ -62,25 +70,40 @@ var ex1i = `[
1.5,
0,
-5e+2
]`
func TestCompact(t *testing.T) {
]`},
{Name(""), "{\"\":\"<>&\u2028\u2029\"}", "{\n\t\"\": \"<>&\u2028\u2029\"\n}"}, // See golang.org/issue/34070
}
var buf bytes.Buffer
for _, tt := range examples {
buf.Reset()
if err := Compact(&buf, []byte(tt.compact)); err != nil {
t.Errorf("Compact(%#q): %v", tt.compact, err)
} else if s := buf.String(); s != tt.compact {
t.Errorf("Compact(%#q) = %#q, want original", tt.compact, s)
}
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
buf.Reset()
if err := Compact(&buf, []byte(tt.compact)); err != nil {
t.Errorf("%s: Compact error: %v", tt.Where, err)
} else if got := buf.String(); got != tt.compact {
t.Errorf("%s: Compact:\n\tgot: %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.compact))
}
buf.Reset()
if err := Compact(&buf, []byte(tt.indent)); err != nil {
t.Errorf("Compact(%#q): %v", tt.indent, err)
continue
} else if s := buf.String(); s != tt.compact {
t.Errorf("Compact(%#q) = %#q, want %#q", tt.indent, s, tt.compact)
}
buf.Reset()
if err := Compact(&buf, []byte(tt.indent)); err != nil {
t.Errorf("%s: Compact error: %v", tt.Where, err)
} else if got := buf.String(); got != tt.compact {
t.Errorf("%s: Compact:\n\tgot: %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.compact))
}
buf.Reset()
if err := Indent(&buf, []byte(tt.indent), "", "\t"); err != nil {
t.Errorf("%s: Indent error: %v", tt.Where, err)
} else if got := buf.String(); got != tt.indent {
t.Errorf("%s: Compact:\n\tgot: %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.indent))
}
buf.Reset()
if err := Indent(&buf, []byte(tt.compact), "", "\t"); err != nil {
t.Errorf("%s: Indent error: %v", tt.Where, err)
} else if got := buf.String(); got != tt.indent {
t.Errorf("%s: Compact:\n\tgot: %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.indent))
}
})
}
}
@@ -88,38 +111,21 @@ func TestCompactSeparators(t *testing.T) {
// U+2028 and U+2029 should be escaped inside strings.
// They should not appear outside strings.
tests := []struct {
CaseName
in, compact string
}{
{"{\"\u2028\": 1}", "{\"\u2028\":1}"},
{"{\"\u2029\" :2}", "{\"\u2029\":2}"},
{Name(""), "{\"\u2028\": 1}", "{\"\u2028\":1}"},
{Name(""), "{\"\u2029\" :2}", "{\"\u2029\":2}"},
}
for _, tt := range tests {
var buf bytes.Buffer
if err := Compact(&buf, []byte(tt.in)); err != nil {
t.Errorf("Compact(%q): %v", tt.in, err)
} else if s := buf.String(); s != tt.compact {
t.Errorf("Compact(%q) = %q, want %q", tt.in, s, tt.compact)
}
}
}
func TestIndent(t *testing.T) {
var buf bytes.Buffer
for _, tt := range examples {
buf.Reset()
if err := Indent(&buf, []byte(tt.indent), "", "\t"); err != nil {
t.Errorf("Indent(%#q): %v", tt.indent, err)
} else if s := buf.String(); s != tt.indent {
t.Errorf("Indent(%#q) = %#q, want original", tt.indent, s)
}
buf.Reset()
if err := Indent(&buf, []byte(tt.compact), "", "\t"); err != nil {
t.Errorf("Indent(%#q): %v", tt.compact, err)
continue
} else if s := buf.String(); s != tt.indent {
t.Errorf("Indent(%#q) = %#q, want %#q", tt.compact, s, tt.indent)
}
t.Run(tt.Name, func(t *testing.T) {
var buf bytes.Buffer
if err := Compact(&buf, []byte(tt.in)); err != nil {
t.Errorf("%s: Compact error: %v", tt.Where, err)
} else if got := buf.String(); got != tt.compact {
t.Errorf("%s: Compact:\n\tgot: %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.compact))
}
})
}
}
@@ -129,11 +135,11 @@ func TestCompactBig(t *testing.T) {
initBig()
var buf bytes.Buffer
if err := Compact(&buf, jsonBig); err != nil {
t.Fatalf("Compact: %v", err)
t.Fatalf("Compact error: %v", err)
}
b := buf.Bytes()
if !bytes.Equal(b, jsonBig) {
t.Error("Compact(jsonBig) != jsonBig")
t.Error("Compact:")
diff(t, b, jsonBig)
return
}
@@ -144,23 +150,23 @@ func TestIndentBig(t *testing.T) {
initBig()
var buf bytes.Buffer
if err := Indent(&buf, jsonBig, "", "\t"); err != nil {
t.Fatalf("Indent1: %v", err)
t.Fatalf("Indent error: %v", err)
}
b := buf.Bytes()
if len(b) == len(jsonBig) {
// jsonBig is compact (no unnecessary spaces);
// indenting should make it bigger
t.Fatalf("Indent(jsonBig) did not get bigger")
t.Fatalf("Indent did not expand the input")
}
// should be idempotent
var buf1 bytes.Buffer
if err := Indent(&buf1, b, "", "\t"); err != nil {
t.Fatalf("Indent2: %v", err)
t.Fatalf("Indent error: %v", err)
}
b1 := buf1.Bytes()
if !bytes.Equal(b1, b) {
t.Error("Indent(Indent(jsonBig)) != Indent(jsonBig)")
t.Error("Indent(Indent(jsonBig)) != Indent(jsonBig):")
diff(t, b1, b)
return
}
@@ -168,40 +174,40 @@ func TestIndentBig(t *testing.T) {
// should get back to original
buf1.Reset()
if err := Compact(&buf1, b); err != nil {
t.Fatalf("Compact: %v", err)
t.Fatalf("Compact error: %v", err)
}
b1 = buf1.Bytes()
if !bytes.Equal(b1, jsonBig) {
t.Error("Compact(Indent(jsonBig)) != jsonBig")
t.Error("Compact(Indent(jsonBig)) != jsonBig:")
diff(t, b1, jsonBig)
return
}
}
type indentErrorTest struct {
in string
err error
}
var indentErrorTests = []indentErrorTest{
{`{"X": "foo", "Y"}`, &SyntaxError{"invalid character '}' after object key", 17}},
{`{"X": "foo" "Y": "bar"}`, &SyntaxError{"invalid character '\"' after object key:value pair", 13}},
}
func TestIndentErrors(t *testing.T) {
for i, tt := range indentErrorTests {
slice := make([]uint8, 0)
buf := bytes.NewBuffer(slice)
if err := Indent(buf, []uint8(tt.in), "", ""); err != nil {
if !reflect.DeepEqual(err, tt.err) {
t.Errorf("#%d: Indent: %#v", i, err)
continue
tests := []struct {
CaseName
in string
err error
}{
{Name(""), `{"X": "foo", "Y"}`, &SyntaxError{"invalid character '}' after object key", 17}},
{Name(""), `{"X": "foo" "Y": "bar"}`, &SyntaxError{"invalid character '\"' after object key:value pair", 13}},
}
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
slice := make([]uint8, 0)
buf := bytes.NewBuffer(slice)
if err := Indent(buf, []uint8(tt.in), "", ""); err != nil {
if !reflect.DeepEqual(err, tt.err) {
t.Fatalf("%s: Indent error:\n\tgot: %v\n\twant: %v", tt.Where, err, tt.err)
}
}
}
})
}
}
func diff(t *testing.T, a, b []byte) {
t.Helper()
for i := 0; ; i++ {
if i >= len(a) || i >= len(b) || a[i] != b[i] {
j := i - 10
@@ -215,10 +221,7 @@ func diff(t *testing.T, a, b []byte) {
}
func trim(b []byte) []byte {
if len(b) > 20 {
return b[0:20]
}
return b
return b[:min(len(b), 20)]
}
// Generate a random JSON object.

View File

@@ -33,7 +33,7 @@ func NewDecoder(r io.Reader) *Decoder {
}
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
// Number instead of as a float64.
// [Number] instead of as a float64.
func (dec *Decoder) UseNumber() { dec.d.useNumber = true }
// DisallowUnknownFields causes the Decoder to return an error when the destination
@@ -47,7 +47,7 @@ func (dec *Decoder) TagKey(v string) { dec.d.tagkey = &v }
// Decode reads the next JSON-encoded value from its
// input and stores it in the value pointed to by v.
//
// See the documentation for Unmarshal for details about
// See the documentation for [Unmarshal] for details about
// the conversion of JSON into a Go value.
func (dec *Decoder) Decode(v any) error {
if dec.err != nil {
@@ -82,7 +82,7 @@ func (dec *Decoder) Decode(v any) error {
}
// Buffered returns a reader of the data remaining in the Decoder's
// buffer. The reader is valid until the next call to Decode.
// buffer. The reader is valid until the next call to [Decoder.Decode].
func (dec *Decoder) Buffered() io.Reader {
return bytes.NewReader(dec.buf[dec.scanp:])
}
@@ -186,7 +186,7 @@ type Encoder struct {
err error
escapeHTML bool
indentBuf *bytes.Buffer
indentBuf []byte
indentPrefix string
indentValue string
}
@@ -197,15 +197,19 @@ func NewEncoder(w io.Writer) *Encoder {
}
// Encode writes the JSON encoding of v to the stream,
// with insignificant space characters elided,
// followed by a newline character.
//
// See the documentation for Marshal for details about the
// See the documentation for [Marshal] for details about the
// conversion of Go values to JSON.
func (enc *Encoder) Encode(v any) error {
if enc.err != nil {
return enc.err
}
e := newEncodeState()
defer encodeStatePool.Put(e)
err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML})
if err != nil {
return err
@@ -221,20 +225,15 @@ func (enc *Encoder) Encode(v any) error {
b := e.Bytes()
if enc.indentPrefix != "" || enc.indentValue != "" {
if enc.indentBuf == nil {
enc.indentBuf = new(bytes.Buffer)
}
enc.indentBuf.Reset()
err = Indent(enc.indentBuf, b, enc.indentPrefix, enc.indentValue)
enc.indentBuf, err = appendIndent(enc.indentBuf[:0], b, enc.indentPrefix, enc.indentValue)
if err != nil {
return err
}
b = enc.indentBuf.Bytes()
b = enc.indentBuf
}
if _, err = enc.w.Write(b); err != nil {
enc.err = err
}
encodeStatePool.Put(e)
return err
}
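Encoder.Encode now reuses a plain []byte for indentation via appendIndent; configuring the stream encoder is unchanged. A short sketch (stdlib path for illustration):

package main

import (
	"encoding/json"
	"os"
)

func main() {
	enc := json.NewEncoder(os.Stdout)
	enc.SetIndent("", "  ")
	enc.SetEscapeHTML(false)
	_ = enc.Encode(map[string]any{"name": "<goext>", "n": 1})
	// {
	//   "n": 1,
	//   "name": "<goext>"
	// }
}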
@@ -258,7 +257,7 @@ func (enc *Encoder) SetEscapeHTML(on bool) {
}
// RawMessage is a raw encoded JSON value.
// It implements Marshaler and Unmarshaler and can
// It implements [Marshaler] and [Unmarshaler] and can
// be used to delay JSON decoding or precompute a JSON encoding.
type RawMessage []byte
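RawMessage, as documented above, lets part of a document be decoded later. A brief sketch (stdlib path for illustration):

package main

import (
	"encoding/json"
	"fmt"
)

type Envelope struct {
	Kind    string          `json:"kind"`
	Payload json.RawMessage `json:"payload"` // decoded once Kind is known
}

func main() {
	var env Envelope
	_ = json.Unmarshal([]byte(`{"kind":"point","payload":{"x":1,"y":2}}`), &env)

	var pt struct{ X, Y int }
	_ = json.Unmarshal(env.Payload, &pt)
	fmt.Println(env.Kind, pt.X, pt.Y) // point 1 2
}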
@@ -284,12 +283,12 @@ var _ Unmarshaler = (*RawMessage)(nil)
// A Token holds a value of one of these types:
//
// Delim, for the four JSON delimiters [ ] { }
// bool, for JSON booleans
// float64, for JSON numbers
// Number, for JSON numbers
// string, for JSON string literals
// nil, for JSON null
// - [Delim], for the four JSON delimiters [ ] { }
// - bool, for JSON booleans
// - float64, for JSON numbers
// - [Number], for JSON numbers
// - string, for JSON string literals
// - nil, for JSON null
type Token any
const (
@@ -359,14 +358,14 @@ func (d Delim) String() string {
}
// Token returns the next JSON token in the input stream.
// At the end of the input stream, Token returns nil, io.EOF.
// At the end of the input stream, Token returns nil, [io.EOF].
//
// Token guarantees that the delimiters [ ] { } it returns are
// properly nested and matched: if Token encounters an unexpected
// delimiter in the input, it will return an error.
//
// The input stream consists of basic JSON values—bool, string,
// number, and null—along with delimiters [ ] { } of type Delim
// number, and null—along with delimiters [ ] { } of type [Delim]
// to mark the start and end of arrays and objects.
// Commas and colons are elided.
func (dec *Decoder) Token() (Token, error) {
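A minimal sketch of consuming the token stream described above, where brackets and braces arrive as Delim values and basic values arrive in between (stdlib path for illustration):

package main

import (
	"encoding/json"
	"fmt"
	"io"
	"strings"
)

func main() {
	dec := json.NewDecoder(strings.NewReader(`{"a":[1,true,"x"]}`))
	for {
		tok, err := dec.Token()
		if err == io.EOF {
			break
		}
		if err != nil {
			panic(err)
		}
		// Prints json.Delim '{', string "a", json.Delim '[', float64 1, ...
		fmt.Printf("%T %v\n", tok, tok)
	}
}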

View File

@@ -6,16 +6,44 @@ package json
import (
"bytes"
"fmt"
"io"
"log"
"net"
"net/http"
"net/http/httptest"
"path"
"reflect"
"runtime"
"runtime/debug"
"strings"
"testing"
)
// TODO(https://go.dev/issue/52751): Replace with native testing support.
// CaseName is a case name annotated with a file and line.
type CaseName struct {
Name string
Where CasePos
}
// Name annotates a case name with the file and line of the caller.
func Name(s string) (c CaseName) {
c.Name = s
runtime.Callers(2, c.Where.pc[:])
return c
}
// CasePos represents a file and line number.
type CasePos struct{ pc [1]uintptr }
func (pos CasePos) String() string {
frames := runtime.CallersFrames(pos.pc[:])
frame, _ := frames.Next()
return fmt.Sprintf("%s:%d", path.Base(frame.File), frame.Line)
}
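How these helpers are used by the table-driven tests rewritten below, as a short illustrative sketch (the test itself is not part of the diff):

func TestCaseNameSketch(t *testing.T) {
	tests := []struct {
		CaseName
		in   string
		want int
	}{
		{Name("empty"), "", 0},
		{Name("ascii"), "abc", 3},
	}
	for _, tt := range tests {
		t.Run(tt.Name, func(t *testing.T) {
			if got := len(tt.in); got != tt.want {
				// tt.Where renders as "stream_test.go:<line>", pointing at the table entry.
				t.Errorf("%s: len(%q) = %d, want %d", tt.Where, tt.in, got, tt.want)
			}
		})
	}
}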
// Test values for the stream test.
// One of each JSON kind.
var streamTest = []any{
@@ -41,24 +69,61 @@ false
func TestEncoder(t *testing.T) {
for i := 0; i <= len(streamTest); i++ {
var buf bytes.Buffer
var buf strings.Builder
enc := NewEncoder(&buf)
// Check that enc.SetIndent("", "") turns off indentation.
enc.SetIndent(">", ".")
enc.SetIndent("", "")
for j, v := range streamTest[0:i] {
if err := enc.Encode(v); err != nil {
t.Fatalf("encode #%d: %v", j, err)
t.Fatalf("#%d.%d Encode error: %v", i, j, err)
}
}
if have, want := buf.String(), nlines(streamEncoded, i); have != want {
t.Errorf("encoding %d items: mismatch", i)
t.Errorf("encoding %d items: mismatch:", i)
diff(t, []byte(have), []byte(want))
break
}
}
}
func TestEncoderErrorAndReuseEncodeState(t *testing.T) {
// Disable the GC temporarily to prevent encodeState's in Pool being cleaned away during the test.
percent := debug.SetGCPercent(-1)
defer debug.SetGCPercent(percent)
// Trigger an error in Marshal with cyclic data.
type Dummy struct {
Name string
Next *Dummy
}
dummy := Dummy{Name: "Dummy"}
dummy.Next = &dummy
var buf bytes.Buffer
enc := NewEncoder(&buf)
if err := enc.Encode(dummy); err == nil {
t.Errorf("Encode(dummy) error: got nil, want non-nil")
}
type Data struct {
A string
I int
}
want := Data{A: "a", I: 1}
if err := enc.Encode(want); err != nil {
t.Errorf("Marshal error: %v", err)
}
var got Data
if err := Unmarshal(buf.Bytes(), &got); err != nil {
t.Errorf("Unmarshal error: %v", err)
}
if got != want {
t.Errorf("Marshal/Unmarshal roundtrip:\n\tgot: %v\n\twant: %v", got, want)
}
}
var streamEncodedIndent = `0.1
"hello"
null
@@ -77,14 +142,14 @@ false
`
func TestEncoderIndent(t *testing.T) {
var buf bytes.Buffer
var buf strings.Builder
enc := NewEncoder(&buf)
enc.SetIndent(">", ".")
for _, v := range streamTest {
enc.Encode(v)
}
if have, want := buf.String(), streamEncodedIndent; have != want {
t.Error("indented encoding mismatch")
t.Error("Encode mismatch:")
diff(t, []byte(have), []byte(want))
}
}
@@ -122,50 +187,51 @@ func TestEncoderSetEscapeHTML(t *testing.T) {
Bar string `json:"bar,string"`
}{`<html>foobar</html>`}
for _, tt := range []struct {
name string
tests := []struct {
CaseName
v any
wantEscape string
want string
}{
{"c", c, `"\u003c\u0026\u003e"`, `"<&>"`},
{"ct", ct, `"\"\u003c\u0026\u003e\""`, `"\"<&>\""`},
{`"<&>"`, "<&>", `"\u003c\u0026\u003e"`, `"<&>"`},
{Name("c"), c, `"\u003c\u0026\u003e"`, `"<&>"`},
{Name("ct"), ct, `"\"\u003c\u0026\u003e\""`, `"\"<&>\""`},
{Name(`"<&>"`), "<&>", `"\u003c\u0026\u003e"`, `"<&>"`},
{
"tagStruct", tagStruct,
Name("tagStruct"), tagStruct,
`{"\u003c\u003e\u0026#! ":0,"Invalid":0}`,
`{"<>&#! ":0,"Invalid":0}`,
},
{
`"<str>"`, marshalerStruct,
Name(`"<str>"`), marshalerStruct,
`{"NonPtr":"\u003cstr\u003e","Ptr":"\u003cstr\u003e"}`,
`{"NonPtr":"<str>","Ptr":"<str>"}`,
},
{
"stringOption", stringOption,
Name("stringOption"), stringOption,
`{"bar":"\"\\u003chtml\\u003efoobar\\u003c/html\\u003e\""}`,
`{"bar":"\"<html>foobar</html>\""}`,
},
} {
var buf bytes.Buffer
enc := NewEncoder(&buf)
if err := enc.Encode(tt.v); err != nil {
t.Errorf("Encode(%s): %s", tt.name, err)
continue
}
if got := strings.TrimSpace(buf.String()); got != tt.wantEscape {
t.Errorf("Encode(%s) = %#q, want %#q", tt.name, got, tt.wantEscape)
}
buf.Reset()
enc.SetEscapeHTML(false)
if err := enc.Encode(tt.v); err != nil {
t.Errorf("SetEscapeHTML(false) Encode(%s): %s", tt.name, err)
continue
}
if got := strings.TrimSpace(buf.String()); got != tt.want {
t.Errorf("SetEscapeHTML(false) Encode(%s) = %#q, want %#q",
tt.name, got, tt.want)
}
}
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
var buf strings.Builder
enc := NewEncoder(&buf)
if err := enc.Encode(tt.v); err != nil {
t.Fatalf("%s: Encode(%s) error: %s", tt.Where, tt.Name, err)
}
if got := strings.TrimSpace(buf.String()); got != tt.wantEscape {
t.Errorf("%s: Encode(%s):\n\tgot: %s\n\twant: %s", tt.Where, tt.Name, got, tt.wantEscape)
}
buf.Reset()
enc.SetEscapeHTML(false)
if err := enc.Encode(tt.v); err != nil {
t.Fatalf("%s: SetEscapeHTML(false) Encode(%s) error: %s", tt.Where, tt.Name, err)
}
if got := strings.TrimSpace(buf.String()); got != tt.want {
t.Errorf("%s: SetEscapeHTML(false) Encode(%s):\n\tgot: %s\n\twant: %s",
tt.Where, tt.Name, got, tt.want)
}
})
}
}
@@ -186,14 +252,14 @@ func TestDecoder(t *testing.T) {
dec := NewDecoder(&buf)
for j := range out {
if err := dec.Decode(&out[j]); err != nil {
t.Fatalf("decode #%d/%d: %v", j, i, err)
t.Fatalf("decode #%d/%d error: %v", j, i, err)
}
}
if !reflect.DeepEqual(out, streamTest[0:i]) {
t.Errorf("decoding %d items: mismatch", i)
t.Errorf("decoding %d items: mismatch:", i)
for j := range out {
if !reflect.DeepEqual(out[j], streamTest[j]) {
t.Errorf("#%d: have %v want %v", j, out[j], streamTest[j])
t.Errorf("#%d:\n\tgot: %v\n\twant: %v", j, out[j], streamTest[j])
}
}
break
@@ -212,14 +278,14 @@ func TestDecoderBuffered(t *testing.T) {
t.Fatal(err)
}
if m.Name != "Gopher" {
t.Errorf("Name = %q; want Gopher", m.Name)
t.Errorf("Name = %s, want Gopher", m.Name)
}
rest, err := io.ReadAll(d.Buffered())
if err != nil {
t.Fatal(err)
}
if g, w := string(rest), " extra "; g != w {
t.Errorf("Remaining = %q; want %q", g, w)
if got, want := string(rest), " extra "; got != want {
t.Errorf("Remaining = %s, want %s", got, want)
}
}
@@ -244,20 +310,20 @@ func TestRawMessage(t *testing.T) {
Y float32
}
const raw = `["\u0056",null]`
const msg = `{"X":0.1,"Id":["\u0056",null],"Y":0.2}`
err := Unmarshal([]byte(msg), &data)
const want = `{"X":0.1,"Id":["\u0056",null],"Y":0.2}`
err := Unmarshal([]byte(want), &data)
if err != nil {
t.Fatalf("Unmarshal: %v", err)
t.Fatalf("Unmarshal error: %v", err)
}
if string([]byte(data.Id)) != raw {
t.Fatalf("Raw mismatch: have %#q want %#q", []byte(data.Id), raw)
t.Fatalf("Unmarshal:\n\tgot: %s\n\twant: %s", []byte(data.Id), raw)
}
b, err := Marshal(&data)
got, err := Marshal(&data)
if err != nil {
t.Fatalf("Marshal: %v", err)
t.Fatalf("Marshal error: %v", err)
}
if string(b) != msg {
t.Fatalf("Marshal: have %#q want %#q", b, msg)
if string(got) != want {
t.Fatalf("Marshal:\n\tgot: %s\n\twant: %s", got, want)
}
}
@@ -268,174 +334,156 @@ func TestNullRawMessage(t *testing.T) {
IdPtr *RawMessage
Y float32
}
const msg = `{"X":0.1,"Id":null,"IdPtr":null,"Y":0.2}`
err := Unmarshal([]byte(msg), &data)
const want = `{"X":0.1,"Id":null,"IdPtr":null,"Y":0.2}`
err := Unmarshal([]byte(want), &data)
if err != nil {
t.Fatalf("Unmarshal: %v", err)
t.Fatalf("Unmarshal error: %v", err)
}
if want, got := "null", string(data.Id); want != got {
t.Fatalf("Raw mismatch: have %q, want %q", got, want)
t.Fatalf("Unmarshal:\n\tgot: %s\n\twant: %s", got, want)
}
if data.IdPtr != nil {
t.Fatalf("Raw pointer mismatch: have non-nil, want nil")
t.Fatalf("pointer mismatch: got non-nil, want nil")
}
b, err := Marshal(&data)
got, err := Marshal(&data)
if err != nil {
t.Fatalf("Marshal: %v", err)
t.Fatalf("Marshal error: %v", err)
}
if string(b) != msg {
t.Fatalf("Marshal: have %#q want %#q", b, msg)
if string(got) != want {
t.Fatalf("Marshal:\n\tgot: %s\n\twant: %s", got, want)
}
}
var blockingTests = []string{
`{"x": 1}`,
`[1, 2, 3]`,
}
func TestBlocking(t *testing.T) {
for _, enc := range blockingTests {
r, w := net.Pipe()
go w.Write([]byte(enc))
var val any
// If Decode reads beyond what w.Write writes above,
// it will block, and the test will deadlock.
if err := NewDecoder(r).Decode(&val); err != nil {
t.Errorf("decoding %s: %v", enc, err)
}
r.Close()
w.Close()
tests := []struct {
CaseName
in string
}{
{Name(""), `{"x": 1}`},
{Name(""), `[1, 2, 3]`},
}
}
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
r, w := net.Pipe()
go w.Write([]byte(tt.in))
var val any
func BenchmarkEncoderEncode(b *testing.B) {
b.ReportAllocs()
type T struct {
X, Y string
}
v := &T{"foo", "bar"}
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if err := NewEncoder(io.Discard).Encode(v); err != nil {
b.Fatal(err)
// If Decode reads beyond what w.Write writes above,
// it will block, and the test will deadlock.
if err := NewDecoder(r).Decode(&val); err != nil {
t.Errorf("%s: NewDecoder(%s).Decode error: %v", tt.Where, tt.in, err)
}
}
})
}
type tokenStreamCase struct {
json string
expTokens []any
r.Close()
w.Close()
})
}
}
type decodeThis struct {
v any
}
var tokenStreamCases = []tokenStreamCase{
// streaming token cases
{json: `10`, expTokens: []any{float64(10)}},
{json: ` [10] `, expTokens: []any{
Delim('['), float64(10), Delim(']')}},
{json: ` [false,10,"b"] `, expTokens: []any{
Delim('['), false, float64(10), "b", Delim(']')}},
{json: `{ "a": 1 }`, expTokens: []any{
Delim('{'), "a", float64(1), Delim('}')}},
{json: `{"a": 1, "b":"3"}`, expTokens: []any{
Delim('{'), "a", float64(1), "b", "3", Delim('}')}},
{json: ` [{"a": 1},{"a": 2}] `, expTokens: []any{
Delim('['),
Delim('{'), "a", float64(1), Delim('}'),
Delim('{'), "a", float64(2), Delim('}'),
Delim(']')}},
{json: `{"obj": {"a": 1}}`, expTokens: []any{
Delim('{'), "obj", Delim('{'), "a", float64(1), Delim('}'),
Delim('}')}},
{json: `{"obj": [{"a": 1}]}`, expTokens: []any{
Delim('{'), "obj", Delim('['),
Delim('{'), "a", float64(1), Delim('}'),
Delim(']'), Delim('}')}},
// streaming tokens with intermittent Decode()
{json: `{ "a": 1 }`, expTokens: []any{
Delim('{'), "a",
decodeThis{float64(1)},
Delim('}')}},
{json: ` [ { "a" : 1 } ] `, expTokens: []any{
Delim('['),
decodeThis{map[string]any{"a": float64(1)}},
Delim(']')}},
{json: ` [{"a": 1},{"a": 2}] `, expTokens: []any{
Delim('['),
decodeThis{map[string]any{"a": float64(1)}},
decodeThis{map[string]any{"a": float64(2)}},
Delim(']')}},
{json: `{ "obj" : [ { "a" : 1 } ] }`, expTokens: []any{
Delim('{'), "obj", Delim('['),
decodeThis{map[string]any{"a": float64(1)}},
Delim(']'), Delim('}')}},
{json: `{"obj": {"a": 1}}`, expTokens: []any{
Delim('{'), "obj",
decodeThis{map[string]any{"a": float64(1)}},
Delim('}')}},
{json: `{"obj": [{"a": 1}]}`, expTokens: []any{
Delim('{'), "obj",
decodeThis{[]any{
map[string]any{"a": float64(1)},
}},
Delim('}')}},
{json: ` [{"a": 1} {"a": 2}] `, expTokens: []any{
Delim('['),
decodeThis{map[string]any{"a": float64(1)}},
decodeThis{&SyntaxError{"expected comma after array element", 11}},
}},
{json: `{ "` + strings.Repeat("a", 513) + `" 1 }`, expTokens: []any{
Delim('{'), strings.Repeat("a", 513),
decodeThis{&SyntaxError{"expected colon after object key", 518}},
}},
{json: `{ "\a" }`, expTokens: []any{
Delim('{'),
&SyntaxError{"invalid character 'a' in string escape code", 3},
}},
{json: ` \a`, expTokens: []any{
&SyntaxError{"invalid character '\\\\' looking for beginning of value", 1},
}},
}
func TestDecodeInStream(t *testing.T) {
for ci, tcase := range tokenStreamCases {
tests := []struct {
CaseName
json string
expTokens []any
}{
// streaming token cases
{CaseName: Name(""), json: `10`, expTokens: []any{float64(10)}},
{CaseName: Name(""), json: ` [10] `, expTokens: []any{
Delim('['), float64(10), Delim(']')}},
{CaseName: Name(""), json: ` [false,10,"b"] `, expTokens: []any{
Delim('['), false, float64(10), "b", Delim(']')}},
{CaseName: Name(""), json: `{ "a": 1 }`, expTokens: []any{
Delim('{'), "a", float64(1), Delim('}')}},
{CaseName: Name(""), json: `{"a": 1, "b":"3"}`, expTokens: []any{
Delim('{'), "a", float64(1), "b", "3", Delim('}')}},
{CaseName: Name(""), json: ` [{"a": 1},{"a": 2}] `, expTokens: []any{
Delim('['),
Delim('{'), "a", float64(1), Delim('}'),
Delim('{'), "a", float64(2), Delim('}'),
Delim(']')}},
{CaseName: Name(""), json: `{"obj": {"a": 1}}`, expTokens: []any{
Delim('{'), "obj", Delim('{'), "a", float64(1), Delim('}'),
Delim('}')}},
{CaseName: Name(""), json: `{"obj": [{"a": 1}]}`, expTokens: []any{
Delim('{'), "obj", Delim('['),
Delim('{'), "a", float64(1), Delim('}'),
Delim(']'), Delim('}')}},
dec := NewDecoder(strings.NewReader(tcase.json))
for i, etk := range tcase.expTokens {
// streaming tokens with intermittent Decode()
{CaseName: Name(""), json: `{ "a": 1 }`, expTokens: []any{
Delim('{'), "a",
decodeThis{float64(1)},
Delim('}')}},
{CaseName: Name(""), json: ` [ { "a" : 1 } ] `, expTokens: []any{
Delim('['),
decodeThis{map[string]any{"a": float64(1)}},
Delim(']')}},
{CaseName: Name(""), json: ` [{"a": 1},{"a": 2}] `, expTokens: []any{
Delim('['),
decodeThis{map[string]any{"a": float64(1)}},
decodeThis{map[string]any{"a": float64(2)}},
Delim(']')}},
{CaseName: Name(""), json: `{ "obj" : [ { "a" : 1 } ] }`, expTokens: []any{
Delim('{'), "obj", Delim('['),
decodeThis{map[string]any{"a": float64(1)}},
Delim(']'), Delim('}')}},
var tk any
var err error
{CaseName: Name(""), json: `{"obj": {"a": 1}}`, expTokens: []any{
Delim('{'), "obj",
decodeThis{map[string]any{"a": float64(1)}},
Delim('}')}},
{CaseName: Name(""), json: `{"obj": [{"a": 1}]}`, expTokens: []any{
Delim('{'), "obj",
decodeThis{[]any{
map[string]any{"a": float64(1)},
}},
Delim('}')}},
{CaseName: Name(""), json: ` [{"a": 1} {"a": 2}] `, expTokens: []any{
Delim('['),
decodeThis{map[string]any{"a": float64(1)}},
decodeThis{&SyntaxError{"expected comma after array element", 11}},
}},
{CaseName: Name(""), json: `{ "` + strings.Repeat("a", 513) + `" 1 }`, expTokens: []any{
Delim('{'), strings.Repeat("a", 513),
decodeThis{&SyntaxError{"expected colon after object key", 518}},
}},
{CaseName: Name(""), json: `{ "\a" }`, expTokens: []any{
Delim('{'),
&SyntaxError{"invalid character 'a' in string escape code", 3},
}},
{CaseName: Name(""), json: ` \a`, expTokens: []any{
&SyntaxError{"invalid character '\\\\' looking for beginning of value", 1},
}},
}
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
dec := NewDecoder(strings.NewReader(tt.json))
for i, want := range tt.expTokens {
var got any
var err error
if dt, ok := etk.(decodeThis); ok {
etk = dt.v
err = dec.Decode(&tk)
} else {
tk, err = dec.Token()
}
if experr, ok := etk.(error); ok {
if err == nil || !reflect.DeepEqual(err, experr) {
t.Errorf("case %v: Expected error %#v in %q, but was %#v", ci, experr, tcase.json, err)
if dt, ok := want.(decodeThis); ok {
want = dt.v
err = dec.Decode(&got)
} else {
got, err = dec.Token()
}
if errWant, ok := want.(error); ok {
if err == nil || !reflect.DeepEqual(err, errWant) {
t.Fatalf("%s:\n\tinput: %s\n\tgot error: %v\n\twant error: %v", tt.Where, tt.json, err, errWant)
}
break
} else if err != nil {
t.Fatalf("%s:\n\tinput: %s\n\tgot error: %v\n\twant error: nil", tt.Where, tt.json, err)
}
if !reflect.DeepEqual(got, want) {
t.Fatalf("%s: token %d:\n\tinput: %s\n\tgot: %T(%v)\n\twant: %T(%v)", tt.Where, i, tt.json, got, got, want, want)
}
break
} else if err == io.EOF {
t.Errorf("case %v: Unexpected EOF in %q", ci, tcase.json)
break
} else if err != nil {
t.Errorf("case %v: Unexpected error '%#v' in %q", ci, err, tcase.json)
break
}
if !reflect.DeepEqual(tk, etk) {
t.Errorf(`case %v: %q @ %v expected %T(%v) was %T(%v)`, ci, tcase.json, i, etk, etk, tk, tk)
break
}
}
})
}
}
@@ -449,7 +497,7 @@ func TestHTTPDecoding(t *testing.T) {
defer ts.Close()
res, err := http.Get(ts.URL)
if err != nil {
log.Fatalf("GET failed: %v", err)
log.Fatalf("http.Get error: %v", err)
}
defer res.Body.Close()
@@ -460,15 +508,15 @@ func TestHTTPDecoding(t *testing.T) {
d := NewDecoder(res.Body)
err = d.Decode(&foo)
if err != nil {
t.Fatalf("Decode: %v", err)
t.Fatalf("Decode error: %v", err)
}
if foo.Foo != "bar" {
t.Errorf("decoded %q; want \"bar\"", foo.Foo)
t.Errorf(`Decode: got %q, want "bar"`, foo.Foo)
}
// make sure we get the EOF the second time
err = d.Decode(&foo)
if err != io.EOF {
t.Errorf("err = %v; want io.EOF", err)
t.Errorf("Decode error:\n\tgot: %v\n\twant: io.EOF", err)
}
}

View File

@@ -72,49 +72,50 @@ type unicodeTag struct {
W string `json:"Ελλάδα"`
}
var structTagObjectKeyTests = []struct {
raw any
value string
key string
}{
{basicLatin2xTag{"2x"}, "2x", "$%-/"},
{basicLatin3xTag{"3x"}, "3x", "0123456789"},
{basicLatin4xTag{"4x"}, "4x", "ABCDEFGHIJKLMO"},
{basicLatin5xTag{"5x"}, "5x", "PQRSTUVWXYZ_"},
{basicLatin6xTag{"6x"}, "6x", "abcdefghijklmno"},
{basicLatin7xTag{"7x"}, "7x", "pqrstuvwxyz"},
{miscPlaneTag{"いろはにほへと"}, "いろはにほへと", "色は匂へど"},
{dashTag{"foo"}, "foo", "-"},
{emptyTag{"Pour Moi"}, "Pour Moi", "W"},
{misnamedTag{"Animal Kingdom"}, "Animal Kingdom", "X"},
{badFormatTag{"Orfevre"}, "Orfevre", "Y"},
{badCodeTag{"Reliable Man"}, "Reliable Man", "Z"},
{percentSlashTag{"brut"}, "brut", "text/html%"},
{punctuationTag{"Union Rags"}, "Union Rags", "!#$%&()*+-./:;<=>?@[]^_{|}~ "},
{spaceTag{"Perreddu"}, "Perreddu", "With space"},
{unicodeTag{"Loukanikos"}, "Loukanikos", "Ελλάδα"},
}
func TestStructTagObjectKey(t *testing.T) {
for _, tt := range structTagObjectKeyTests {
b, err := Marshal(tt.raw)
if err != nil {
t.Fatalf("Marshal(%#q) failed: %v", tt.raw, err)
}
var f any
err = Unmarshal(b, &f)
if err != nil {
t.Fatalf("Unmarshal(%#q) failed: %v", b, err)
}
for i, v := range f.(map[string]any) {
switch i {
case tt.key:
if s, ok := v.(string); !ok || s != tt.value {
t.Fatalf("Unexpected value: %#q, want %v", s, tt.value)
}
default:
t.Fatalf("Unexpected key: %#q, from %#q", i, b)
tests := []struct {
CaseName
raw any
value string
key string
}{
{Name(""), basicLatin2xTag{"2x"}, "2x", "$%-/"},
{Name(""), basicLatin3xTag{"3x"}, "3x", "0123456789"},
{Name(""), basicLatin4xTag{"4x"}, "4x", "ABCDEFGHIJKLMO"},
{Name(""), basicLatin5xTag{"5x"}, "5x", "PQRSTUVWXYZ_"},
{Name(""), basicLatin6xTag{"6x"}, "6x", "abcdefghijklmno"},
{Name(""), basicLatin7xTag{"7x"}, "7x", "pqrstuvwxyz"},
{Name(""), miscPlaneTag{"いろはにほへと"}, "いろはにほへと", "色は匂へど"},
{Name(""), dashTag{"foo"}, "foo", "-"},
{Name(""), emptyTag{"Pour Moi"}, "Pour Moi", "W"},
{Name(""), misnamedTag{"Animal Kingdom"}, "Animal Kingdom", "X"},
{Name(""), badFormatTag{"Orfevre"}, "Orfevre", "Y"},
{Name(""), badCodeTag{"Reliable Man"}, "Reliable Man", "Z"},
{Name(""), percentSlashTag{"brut"}, "brut", "text/html%"},
{Name(""), punctuationTag{"Union Rags"}, "Union Rags", "!#$%&()*+-./:;<=>?@[]^_{|}~ "},
{Name(""), spaceTag{"Perreddu"}, "Perreddu", "With space"},
{Name(""), unicodeTag{"Loukanikos"}, "Loukanikos", "Ελλάδα"},
}
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
b, err := Marshal(tt.raw)
if err != nil {
t.Fatalf("%s: Marshal error: %v", tt.Where, err)
}
}
var f any
err = Unmarshal(b, &f)
if err != nil {
t.Fatalf("%s: Unmarshal error: %v", tt.Where, err)
}
for k, v := range f.(map[string]any) {
if k == tt.key {
if s, ok := v.(string); !ok || s != tt.value {
t.Fatalf("%s: Unmarshal(%#q) value:\n\tgot: %q\n\twant: %q", tt.Where, b, s, tt.value)
}
} else {
t.Fatalf("%s: Unmarshal(%#q): unexpected key: %q", tt.Where, b, k)
}
}
})
}
}

View File

@@ -22,7 +22,7 @@ func TestTagParsing(t *testing.T) {
{"bar", false},
} {
if opts.Contains(tt.opt) != tt.want {
t.Errorf("Contains(%q) = %v", tt.opt, !tt.want)
t.Errorf("Contains(%q) = %v, want %v", tt.opt, !tt.want, tt.want)
}
}
}

View File

@@ -169,7 +169,7 @@ func EncodeImage(img image.Image, compression ImageCompresson) (bytes.Buffer, st
}
}
func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fillColor color.Color) (image.Image, error) {
func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fillColor color.Color) (image.Image, PercentageRectangle, error) {
iw := img.Bounds().Size().X
ih := img.Bounds().Size().Y
@@ -214,12 +214,12 @@ func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fil
draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
draw.Draw(newImg, newImg.Bounds(), img, image.Pt(0, 0), draw.Over)
return newImg, nil
return newImg, PercentageRectangle{0, 0, 1, 1}, nil
}
if fit == ImageFitContainCenter || fit == ImageFitContainTopLeft || fit == ImageFitContainTopRight || fit == ImageFitContainBottomLeft || fit == ImageFitContainBottomRight {
// image-fit:cover fills the target-bounding-box with the image, there is potentially empty-space, it potentially cuts parts of the image away
// image-fit:contain fits the whole image inside the target-bounding-box, there is potentially empty-space, no parts of the image are cut away
// we use the bigger (!) value of facW and facH,
// because the image is made to fit the bounding-box, the bigger factor (= the dimension the image is stretched less) is relevant
@@ -234,11 +234,15 @@ func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fil
// we scale the bounding box by fac (both dimensions by the same amount, to keep the bounding-box ratio)
// [ow|oh] ==> size of output image (same ratio as bounding box [bbw|bbh])
ow := int(math.Round(bbw * facOut))
oh := int(math.Round(bbh * facOut))
facScale := mathext.Min(float64(ow)/float64(iw), float64(oh)/float64(ih))
// [dw|dh] ==> size of destination rect (where to draw source in output image) (same ratio as input image [iw|ih])
dw := int(math.Round(float64(iw) * facScale))
dh := int(math.Round(float64(ih) * facScale))
@@ -248,11 +252,11 @@ func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fil
if fit == ImageFitContainCenter {
destBounds = image.Rect((ow-dw)/2, (oh-dh)/2, (ow-dw)/2+dw, (oh-dh)/2+dh)
} else if fit == ImageFitContainTopLeft {
destBounds = image.Rect(0, 0, iw, dh)
destBounds = image.Rect(0, 0, dw, dh)
} else if fit == ImageFitContainTopRight {
destBounds = image.Rect(ow-iw, 0, ow, dh)
destBounds = image.Rect(ow-dw, 0, ow, dh)
} else if fit == ImageFitContainBottomLeft {
destBounds = image.Rect(0, oh-dh, iw, oh)
destBounds = image.Rect(0, oh-dh, dw, oh)
} else if fit == ImageFitContainBottomRight {
destBounds = image.Rect(ow-dw, oh-dh, ow, oh)
}
@@ -262,7 +266,7 @@ func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fil
draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
draw.Draw(newImg, destBounds, img, image.Pt(0, 0), draw.Over)
return newImg, nil
return newImg, calcRelativeRect(destBounds, newImg.Bounds()), nil
}
if fit == ImageFitStretch {
@@ -289,10 +293,10 @@ func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fil
draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
draw.Draw(newImg, newImg.Bounds(), img, image.Pt(0, 0), draw.Over)
return newImg, nil
return newImg, PercentageRectangle{0, 0, 1, 1}, nil
}
return nil, exerr.New(exerr.TypeInternal, fmt.Sprintf("unknown image-fit: '%s'", fit)).Build()
return nil, PercentageRectangle{}, exerr.New(exerr.TypeInternal, fmt.Sprintf("unknown image-fit: '%s'", fit)).Build()
}
func VerifyAndDecodeImage(data io.Reader, mime string) (image.Image, error) {

35
imageext/types.go Normal file
View File

@@ -0,0 +1,35 @@
package imageext
import "image"
type Rectangle struct {
X float64
Y float64
W float64
H float64
}
type PercentageRectangle struct {
X float64 // [0..1]
Y float64 // [0..1]
W float64 // [0..1]
H float64 // [0..1]
}
func (r PercentageRectangle) Of(ref Rectangle) Rectangle {
return Rectangle{
X: ref.X + r.X*ref.W,
Y: ref.Y + r.Y*ref.H,
W: r.W * ref.W,
H: r.H * ref.H,
}
}
func calcRelativeRect(inner image.Rectangle, outer image.Rectangle) PercentageRectangle {
return PercentageRectangle{
X: float64(inner.Min.X-outer.Min.X) / float64(outer.Dx()),
Y: float64(inner.Min.Y-outer.Min.Y) / float64(outer.Dy()),
W: float64(inner.Dx()) / float64(outer.Dx()),
H: float64(inner.Dy()) / float64(outer.Dy()),
}
}
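ObjectFitImage now additionally reports, as a PercentageRectangle, where the source image ended up inside the output image, and Of projects that relative rectangle back onto any absolute rectangle. A hedged in-package test sketch (file and test names are illustrative, not part of this changeset):

// imageext/objectfit_sketch_test.go (illustrative)
package imageext

import (
	"image"
	"image/color"
	"math"
	"testing"
)

func TestObjectFitContainRelativeRect(t *testing.T) {
	src := image.NewRGBA(image.Rect(0, 0, 200, 100)) // 2:1 source

	// contain + center letterboxes the image inside a square bounding box.
	out, rel, err := ObjectFitImage(src, 100, 100, ImageFitContainCenter, color.Black)
	if err != nil {
		t.Fatal(err)
	}

	// Full width, vertically centered half of the height.
	want := PercentageRectangle{X: 0, Y: 0.25, W: 1, H: 0.5}
	if math.Abs(rel.Y-want.Y) > 1e-9 || math.Abs(rel.H-want.H) > 1e-9 {
		t.Errorf("relative rect:\n\tgot: %+v\n\twant: %+v", rel, want)
	}

	// Of projects the relative rect onto the output bounds (e.g. {0 25 100 50} for a 100x100 output).
	abs := rel.Of(Rectangle{X: 0, Y: 0, W: float64(out.Bounds().Dx()), H: float64(out.Bounds().Dy())})
	t.Logf("drawn area: %+v", abs)
}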

View File

@@ -323,6 +323,16 @@ func ArrMap[T1 any, T2 any](arr []T1, conv func(v T1) T2) []T2 {
return r
}
func ArrDeRef[T1 any](arr []*T1) []T1 {
r := make([]T1, 0, len(arr))
for _, v := range arr {
if v != nil {
r = append(r, *v)
}
}
return r
}
func MapMap[TK comparable, TV any, TR any](inmap map[TK]TV, conv func(k TK, v TV) TR) []TR {
r := make([]TR, 0, len(inmap))
for k, v := range inmap {
@@ -391,6 +401,14 @@ func ArrSum[T NumberConstraint](arr []T) T {
return r
}
func ArrMapSum[T1 any, T2 NumberConstraint](arr []T1, conv func(v T1) T2) T2 {
var r T2 = 0
for _, v := range arr {
r += conv(v)
}
return r
}
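A small usage sketch for the two helpers added above (ArrDeRef from the previous hunk and ArrMapSum); the import path follows the goext module path used elsewhere in this changeset:

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

type order struct{ Amount int }

func main() {
	a, b := 3, 5

	// ArrDeRef drops nil entries and dereferences the remaining pointers.
	fmt.Println(langext.ArrDeRef([]*int{&a, nil, &b})) // [3 5]

	// ArrMapSum maps each element to a number and sums the results.
	orders := []order{{Amount: 2}, {Amount: 40}}
	fmt.Println(langext.ArrMapSum(orders, func(o order) int { return o.Amount })) // 42
}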
func ArrFlatten[T1 any, T2 any](arr []T1, conv func(v T1) []T2) []T2 {
r := make([]T2, 0, len(arr))
for _, v1 := range arr {
@@ -474,6 +492,17 @@ func ArrAppend[T any](arr []T, add ...T) []T {
return r
}
// ArrPrepend works similarly to append(x, y, z) - but does not touch the old array and creates a new one
// Also - in contrast to ArrAppend - the add values are inserted at the start of the resulting array (in reverse order)
func ArrPrepend[T any](arr []T, add ...T) []T {
out := make([]T, len(arr)+len(add))
copy(out[len(add):], arr)
for i := 0; i < len(add); i++ {
out[len(add)-i-1] = add[i]
}
return out
}
// ArrCopy does a shallow copy of the 'in' array
func ArrCopy[T any](in []T) []T {
out := make([]T, len(in))
@@ -553,3 +582,18 @@ func ArrChunk[T any](arr []T, chunkSize int) [][]T {
return res
}
func ArrGroupBy[T1 any, T2 comparable](arr []T1, groupfunc func(v T1) T2) map[T2][]T1 {
r := make(map[T2][]T1)
for _, v := range arr {
key := groupfunc(v)
if _, ok := r[key]; ok {
r[key] = append(r[key], v)
} else {
r[key] = []T1{v}
}
}
return r
}
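ArrGroupBy buckets the input slice by the key returned from groupfunc, keeping the input order inside each bucket. A brief sketch (import path as used elsewhere in this changeset):

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
	words := []string{"apple", "avocado", "banana", "blueberry", "cherry"}

	groups := langext.ArrGroupBy(words, func(v string) byte { return v[0] })

	fmt.Println(groups['a']) // [apple avocado]
	fmt.Println(groups['b']) // [banana blueberry]
}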

View File

@@ -2,6 +2,7 @@ package langext
import (
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"strings"
"testing"
)
@@ -10,3 +11,13 @@ func TestJoinString(t *testing.T) {
res := JoinString(ids, ",")
tst.AssertEqual(t, res, "1,2,3")
}
func TestArrPrepend(t *testing.T) {
v1 := []string{"1", "2", "3"}
v2 := ArrPrepend(v1, "4", "5", "6")
tst.AssertEqual(t, strings.Join(v1, ""), "123")
tst.AssertEqual(t, strings.Join(v2, ""), "654123")
}

View File

@@ -77,6 +77,14 @@ func Coalesce4Opt[T any](v1 *T, v2 *T, v3 *T, v4 *T) *T {
return v4
}
func CoalesceDblPtr[T any](v1 **T, v2 *T) *T {
if v1 != nil {
return *v1
}
return v2
}
func CoalesceString(s *string, def string) string {
if s == nil {
return def
@@ -125,6 +133,31 @@ func CoalesceStringer(s fmt.Stringer, def string) string {
}
}
func CoalesceDefault[T comparable](v1 T, def T) T {
if v1 != *new(T) {
return v1
}
return def
}
func CoalesceDefaultArr[T comparable](v1 T, vMore ...T) T {
if v1 != *new(T) {
return v1
}
if len(vMore) == 0 {
return v1
}
for i := 0; i < len(vMore)-1; i++ {
if vMore[i] != *new(T) {
return vMore[i] // first non-zero candidate wins
}
}
return vMore[len(vMore)-1]
}
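A hedged sketch of the intended semantics of the two coalesce helpers above: the first argument wins if it is not the zero value, otherwise the first non-zero candidate does, and the last candidate serves as the final fallback (import path as used elsewhere in this changeset):

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
	// CoalesceDefault: v1 if it is not the zero value, otherwise def.
	fmt.Println(langext.CoalesceDefault("", "fallback"))    // fallback
	fmt.Println(langext.CoalesceDefault("set", "fallback")) // set

	// CoalesceDefaultArr: first non-zero value; the last entry is the fallback.
	fmt.Println(langext.CoalesceDefaultArr(0, 0, 7, 99)) // 7
	fmt.Println(langext.CoalesceDefaultArr(0, 0, 0, 99)) // 99
}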
func SafeCast[T any](v any, def T) T {
switch r := v.(type) {
case T:

15
langext/io.go Normal file
View File

@@ -0,0 +1,15 @@
package langext
import "io"
type nopCloser struct {
io.Writer
}
func (n nopCloser) Close() error {
return nil // no op
}
func WriteNopCloser(w io.Writer) io.WriteCloser {
return nopCloser{w}
}
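WriteNopCloser is the writer-side counterpart of io.NopCloser: it lets a plain io.Writer satisfy an API that insists on io.WriteCloser without ever closing the underlying writer. A short sketch (the compress helper is purely illustrative):

package main

import (
	"fmt"
	"io"
	"os"
	"strings"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

// compress stands in for any function that demands an io.WriteCloser.
func compress(dst io.WriteCloser, s string) error {
	if _, err := io.Copy(dst, strings.NewReader(s)); err != nil {
		return err
	}
	return dst.Close() // calls the no-op Close, os.Stdout stays open
}

func main() {
	if err := compress(langext.WriteNopCloser(os.Stdout), "hello\n"); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}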

View File

@@ -66,7 +66,7 @@ func CopyMap[K comparable, V any](a map[K]V) map[K]V {
func ForceMap[K comparable, V any](v map[K]V) map[K]V {
if v == nil {
return make(map[K]V, 0)
return make(map[K]V)
} else {
return v
}

View File

@@ -11,7 +11,7 @@ func (p PanicWrappedErr) Error() string {
return "A panic occured"
}
func (p PanicWrappedErr) ReoveredObj() any {
func (p PanicWrappedErr) RecoveredObj() any {
return p.panic
}

View File

@@ -22,6 +22,13 @@ func DblPtr[T any](v T) **T {
return &v_
}
func DblPtrIfNotNil[T any](v *T) **T {
if v == nil {
return nil
}
return &v
}
func DblPtrNil[T any]() **T {
var v *T = nil
return &v

View File

@@ -88,12 +88,15 @@ func StrRunePadRight(str string, pad string, padlen int) string {
func Indent(str string, pad string) string {
eonl := strings.HasSuffix(str, "\n")
if eonl {
str = str[0 : len(str)-1]
}
r := ""
for _, v := range strings.Split(str, "\n") {
r += pad + v + "\n"
}
if eonl {
if !eonl {
r = r[0 : len(r)-1]
}
@@ -115,3 +118,21 @@ func StrRepeat(val string, count int) string {
}
return r
}
func StrWrap(val string, linelen int, seperator string) string {
res := ""
for iPos := 0; ; {
next := min(iPos+linelen, len(val))
res += val[iPos:next]
iPos = next
if iPos >= len(val) {
break
}
res += seperator
}
return res
}

152
langext/string_test.go Normal file
View File

@@ -0,0 +1,152 @@
package langext
import "testing"
func TestStrLimitBehaviour(t *testing.T) {
val := "Hello, World!"
maxlen := 5
suffix := "..."
expected := "He..."
result := StrLimit(val, maxlen, suffix)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrSplitBehaviour1(t *testing.T) {
val := "Hello,World,,"
sep := ","
expected := []string{"Hello", "World"}
result := StrSplit(val, sep, false)
if len(result) != len(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrSplitBehaviour2(t *testing.T) {
val := "Hello,World,,"
sep := ","
expected := []string{"Hello", "World", "", ""}
result := StrSplit(val, sep, true)
if len(result) != len(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrPadRightBehaviour(t *testing.T) {
str := "Hello"
pad := "*"
padlen := 10
expected := "Hello*****"
result := StrPadRight(str, pad, padlen)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrPadLeftBehaviour(t *testing.T) {
str := "Hello"
pad := "*"
padlen := 10
expected := "*****Hello"
result := StrPadLeft(str, pad, padlen)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrRunePadLeftBehaviour(t *testing.T) {
str := "Hello"
pad := "*"
padlen := 10
expected := "*****Hello"
result := StrRunePadLeft(str, pad, padlen)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrRunePadRightBehaviour(t *testing.T) {
str := "Hello"
pad := "*"
padlen := 10
expected := "Hello*****"
result := StrRunePadRight(str, pad, padlen)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestIndentBehaviour1(t *testing.T) {
str := "Hello\nWorld"
pad := ".."
expected := "..Hello\n..World"
result := Indent(str, pad)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestIndentBehaviour2(t *testing.T) {
str := "Hello\nWorld\n"
pad := ".."
expected := "..Hello\n..World\n"
result := Indent(str, pad)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrRepeatBehaviour(t *testing.T) {
val := "Hello"
count := 3
expected := "HelloHelloHello"
result := StrRepeat(val, count)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrWrapBehaviour1(t *testing.T) {
val := "123456789"
linelen := 5
seperator := "\n"
expected := "12345\n6789"
result := StrWrap(val, linelen, seperator)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrWrapBehaviour2(t *testing.T) {
val := "1234567890"
linelen := 5
seperator := "\n"
expected := "12345\n67890"
result := StrWrap(val, linelen, seperator)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrWrapBehaviour3(t *testing.T) {
val := "****************"
linelen := 4
seperator := "\n"
expected := "****\n****\n****\n****"
result := StrWrap(val, linelen, seperator)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrWrapBehaviour4(t *testing.T) {
val := "*****************"
linelen := 4
seperator := "\n"
expected := "****\n****\n****\n****\n*"
result := StrWrap(val, linelen, seperator)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}

View File

@@ -1,5 +1,7 @@
package mathext
import "gogs.mikescher.com/BlackForestBytes/goext/langext"
func ClampInt(v int, lo int, hi int) int {
if v < lo {
return lo
@@ -39,3 +41,25 @@ func ClampFloat64(v float64, lo float64, hi float64) float64 {
return v
}
}
func Clamp[T langext.NumberConstraint](v T, min T, max T) T {
if v < min {
return min
} else if v > max {
return max
} else {
return v
}
}
func ClampOpt[T langext.NumberConstraint](v *T, fallback T, min T, max T) T {
if v == nil {
return fallback
} else if *v < min {
return min
} else if *v > max {
return max
} else {
return *v
}
}
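Clamp restricts a value to [min, max]; ClampOpt additionally handles a nil pointer by returning the fallback. A short sketch, with import paths as used elsewhere in this changeset:

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"gogs.mikescher.com/BlackForestBytes/goext/mathext"
)

func main() {
	fmt.Println(mathext.Clamp(17, 0, 10)) // 10
	fmt.Println(mathext.Clamp(-3, 0, 10)) // 0

	// ClampOpt: nil falls back, non-nil values are clamped into [min, max].
	var unset *int
	fmt.Println(mathext.ClampOpt(unset, 5, 0, 10))           // 5
	fmt.Println(mathext.ClampOpt(langext.Ptr(42), 5, 0, 10)) // 10
}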

View File

@@ -87,13 +87,3 @@ func Abs[T langext.NumberConstraint](v T) T {
return v
}
}
func Clamp[T langext.NumberConstraint](v T, min T, max T) T {
if v < min {
return min
} else if v > max {
return max
} else {
return v
}
}

View File

@@ -1,6 +1,9 @@
package mathext
import "gogs.mikescher.com/BlackForestBytes/goext/langext"
import (
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
func Sum[T langext.NumberConstraint](v []T) T {
total := T(0)
@@ -41,3 +44,53 @@ func ArrMax[T langext.OrderedConstraint](v []T) T {
}
return r
}
func MustPercentile[T langext.NumberConstraint](rawdata []T, percentile float64) T {
v, err := Percentile(rawdata, percentile)
if err != nil {
panic(err)
}
return v
}
func Percentile[T langext.NumberConstraint](rawdata []T, percentile float64) (T, error) {
v, err := FloatPercentile(rawdata, percentile)
if err != nil {
return T(0), err
}
return T(v), nil
}
func FloatPercentile[T langext.NumberConstraint](rawdata []T, percentile float64) (float64, error) {
if len(rawdata) == 0 {
return 0, exerr.New(exerr.TypeAssert, "no data to calculate percentile").Any("percentile", percentile).Build()
}
if percentile < 0 || percentile > 100 {
return 0, exerr.New(exerr.TypeAssert, "percentile out of range").Any("percentile", percentile).Build()
}
data := langext.ArrCopy(rawdata)
langext.Sort(data)
idxFloat := float64(len(data)-1) * (percentile / float64(100))
idxInt := int(idxFloat)
// exact match on index
if idxFloat == float64(idxInt) {
return float64(data[idxInt]), nil
}
// linear interpolation
v1 := data[idxInt]
v2 := data[idxInt+1]
weight := idxFloat - float64(idxInt)
valFloat := (float64(v1) * (1 - weight)) + (float64(v2) * weight)
return valFloat, nil
}
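Percentile and FloatPercentile sort a copy of the data and linearly interpolate between the two neighbouring samples when the requested percentile does not hit an index exactly. A worked sketch mirroring the values used in the tests below (import path as elsewhere in this changeset):

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/mathext"
)

func main() {
	data := []int{1, 3, 5, 7, 9}

	// index = (len-1) * p/100 = 4 * 0.40 = 1.6
	// -> interpolate between data[1]=3 and data[2]=5 with weight 0.6 -> 4.2
	f, _ := mathext.FloatPercentile(data, 40)
	fmt.Println(f) // 4.2

	// Percentile casts the interpolated value back to T (int here), truncating to 4.
	p, _ := mathext.Percentile(data, 40)
	fmt.Println(p) // 4
}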

238
mathext/statistics_test.go Normal file
View File

@@ -0,0 +1,238 @@
package mathext
import (
"math"
"testing"
)
func TestSumIntsHappyPath(t *testing.T) {
values := []int{1, 2, 3, 4, 5}
expected := 15
result := Sum(values)
if result != expected {
t.Errorf("Sum of %v; expected %v, got %v", values, expected, result)
}
}
func TestSumFloatsHappyPath(t *testing.T) {
values := []float64{1.1, 2.2, 3.3}
expected := 6.6
result := Sum(values)
if result != expected {
t.Errorf("Sum of %v; expected %v, got %v", values, expected, result)
}
}
func TestMeanOfInts(t *testing.T) {
values := []float64{1, 2, 3, 4, 5}
expected := 3.0
result := Mean(values)
if result != expected {
t.Errorf("Mean of %v; expected %v, got %v", values, expected, result)
}
}
func TestMedianOddNumberOfElements(t *testing.T) {
values := []float64{1, 2, 3, 4, 5}
expected := 3.0
result := Median(values)
if result != expected {
t.Errorf("Median of %v; expected %v, got %v", values, expected, result)
}
}
func TestMedianEvenNumberOfElements(t *testing.T) {
values := []float64{1, 2, 3, 4, 5, 6}
expected := 3.5
result := Median(values)
if result != expected {
t.Errorf("Median of %v; expected %v, got %v", values, expected, result)
}
}
func TestArrMinInts(t *testing.T) {
values := []int{5, 3, 9, 1, 4}
expected := 1
result := ArrMin(values)
if result != expected {
t.Errorf("ArrMin of %v; expected %v, got %v", values, expected, result)
}
}
func TestArrMaxInts(t *testing.T) {
values := []int{5, 3, 9, 1, 4}
expected := 9
result := ArrMax(values)
if result != expected {
t.Errorf("ArrMax of %v; expected %v, got %v", values, expected, result)
}
}
func TestPercentileValidInput(t *testing.T) {
values := []int{1, 2, 3, 4, 5}
percentile := 50.0
expected := 3
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileOutOfRange(t *testing.T) {
values := []int{1, 2, 3, 4, 5}
percentile := 150.0
_, err := Percentile(values, percentile)
if err == nil {
t.Errorf("Expected error for percentile %v out of range, got nil", percentile)
}
}
func TestPercentileValueInArray(t *testing.T) {
values := []int{1, 3, 5, 7, 9}
percentile := 40.0
expected := 4
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileValueInArray(t *testing.T) {
values := []int{1, 3, 5, 7, 9}
percentile := 40.0
expected := 4.2
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileInterpolation(t *testing.T) {
values := []float64{1.0, 2.0, 3.0, 4.0, 5.0}
percentile := 25.0
expected := 2.0
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileSingleValue(t *testing.T) {
values := []int{10}
percentile := 50.0
expected := 10
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileExactlyBetweenTwoValues(t *testing.T) {
values := []float64{1, 2, 3, 4, 5}
percentile := 62.5 // Exactly between 3 and 4
expected := 3.5
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileTwoThirdsBetweenTwoValues(t *testing.T) {
values := []float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
percentile := 66.666666666666
expected := 6.666666666666667 // Since 2/3 of the way between 6 and 7 is 6.666...
result, err := Percentile(values, percentile)
if err != nil || math.Abs(result-expected) > 1e-9 {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileBetweenTwoValues1(t *testing.T) {
values := []float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
percentile := 11.0
expected := 1.1
result, err := Percentile(values, percentile)
if err != nil || math.Abs(result-expected) > 1e-9 {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileBetweenTwoValues2(t *testing.T) {
values := []float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
percentile := 9.0
expected := 0.9
result, err := Percentile(values, percentile)
if err != nil || math.Abs(result-expected) > 1e-9 {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileUnsortedInput(t *testing.T) {
values := []float64{5, 1, 4, 2, 3} // Unsorted input
percentile := 50.0
expected := 3.0
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileUnsortedInputLowPercentile(t *testing.T) {
values := []float64{10, 6, 7, 3, 2, 9, 8, 1, 4, 5} // Unsorted input
percentile := 10.0
expected := 1.9 // Expecting interpolation between 1 and 2
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileUnsortedInputHighPercentile(t *testing.T) {
values := []float64{10, 6, 7, 3, 2, 9, 8, 1, 4, 5} // Unsorted input
percentile := 90.0
expected := 9.1 // Expecting interpolation between 9 and 10
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileExactValueFromInput(t *testing.T) {
values := []float64{1.5, 2.5, 3.5, 4.5, 5.5}
percentile := 50.0 // Exact value from input array should be 3.5
expected := 3.5
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileInterpolatedValue(t *testing.T) {
values := []float64{1.0, 2.0, 3.0, 4.0, 5.0}
percentile := 87.5 // Interpolated value between 4.0 and 5.0
expected := 4.5
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileUnsortedInputExactValue(t *testing.T) {
values := []float64{5.5, 1.5, 4.5, 2.5, 3.5} // Unsorted input
percentile := 50.0
expected := 3.5
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileUnsortedInputInterpolatedValue(t *testing.T) {
values := []float64{10.5, 6.5, 7.5, 3.5, 2.5, 9.5, 8.5, 1.5, 4.5, 5.5}
percentile := 80.0 // Interpolated value between 8.5 and 9.5
expected := 8.7
result, err := FloatPercentile(values, percentile)
if err != nil || math.Abs(result-expected) > 1e-9 {
t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}

View File

@@ -1,13 +1,14 @@
package pagination
import (
"context"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
)
type MongoFilter interface {
FilterQuery() mongo.Pipeline
Sort() bson.D
FilterQuery(ctx context.Context) mongo.Pipeline
Sort(ctx context.Context) bson.D
}
type dynamicFilter struct {
@@ -15,11 +16,11 @@ type dynamicFilter struct {
sort bson.D
}
func (d dynamicFilter) FilterQuery() mongo.Pipeline {
func (d dynamicFilter) FilterQuery(ctx context.Context) mongo.Pipeline {
return d.pipeline
}
func (d dynamicFilter) Sort() bson.D {
func (d dynamicFilter) Sort(ctx context.Context) bson.D {
return d.sort
}
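Both interface methods now receive the request context, so a filter can read request-scoped data (permissions, tenant, deadlines) while building its pipeline. A minimal sketch of a custom implementation against the new interface (type and field names are illustrative):

package pagination

import (
	"context"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
)

// ownerFilter is an illustrative filter restricted to a single owner.
type ownerFilter struct {
	OwnerID string
}

var _ MongoFilter = ownerFilter{} // compile-time check against the context-aware interface

func (f ownerFilter) FilterQuery(ctx context.Context) mongo.Pipeline {
	return mongo.Pipeline{
		bson.D{{Key: "$match", Value: bson.D{{Key: "ownerId", Value: f.OwnerID}}}},
	}
}

func (f ownerFilter) Sort(ctx context.Context) bson.D {
	return bson.D{{Key: "_id", Value: 1}}
}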

View File

@@ -4,11 +4,13 @@ import (
"encoding/json"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"strings"
)
type ConvertStructToMapOpt struct {
KeepJsonMarshalTypes bool
MaxDepth *int
UseTagsAsKeys *string
}
func ConvertStructToMap(v any, opts ...ConvertStructToMapOpt) map[string]any {
@@ -90,7 +92,21 @@ func reflectToMap(fv reflect.Value, depth int, opt ConvertStructToMapOpt) any {
for i := 0; i < fv.NumField(); i++ {
if fv.Type().Field(i).IsExported() {
res[fv.Type().Field(i).Name] = reflectToMap(fv.Field(i), depth+1, opt)
k := fv.Type().Field(i).Name
if opt.UseTagsAsKeys != nil {
if tagval, ok := fv.Type().Field(i).Tag.Lookup(*opt.UseTagsAsKeys); ok {
if strings.Contains(tagval, ",") {
k = strings.TrimSpace(strings.Split(tagval, ",")[0])
} else {
k = strings.TrimSpace(tagval)
}
} else {
continue
}
}
res[k] = reflectToMap(fv.Field(i), depth+1, opt)
}
}
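With UseTagsAsKeys set, the struct's tag names become the map keys (anything after the first comma of the tag is stripped) and fields without that tag are skipped entirely. A hedged sketch, assuming the usual goext import paths and that this function lives in the reflectext package like the struct-access helpers below:

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"gogs.mikescher.com/BlackForestBytes/goext/reflectext"
)

type user struct {
	FullName string `json:"fullName"`
	Age      int    `json:"age,omitempty"`
	Internal string // no json tag: skipped when UseTagsAsKeys is set
}

func main() {
	u := user{FullName: "Jane", Age: 42, Internal: "x"}

	fmt.Println(reflectext.ConvertStructToMap(u))
	// map[Age:42 FullName:Jane Internal:x]

	fmt.Println(reflectext.ConvertStructToMap(u, reflectext.ConvertStructToMapOpt{UseTagsAsKeys: langext.Ptr("json")}))
	// map[age:42 fullName:Jane]
}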

185
reflectext/structAccess.go Normal file
View File

@@ -0,0 +1,185 @@
package reflectext
import (
"errors"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"strconv"
"strings"
)
var ErrAccessStructInvalidFieldType = errors.New("invalid field type")
var ErrAccessStructFieldInPathWasNil = errors.New("a field in the path was nil")
var ErrAccessStructInvalidArrayIndex = errors.New("invalid array index")
var ErrAccessStructInvalidMapKey = errors.New("invalid map key")
var ErrAccessStructArrayAccess = errors.New("trying to access array")
var ErrAccessStructMapAccess = errors.New("trying to access map")
var ErrAccessStructMissingField = errors.New("missing field")
type AccessStructOpt struct {
ReturnNilOnMissingFields bool // return nil (instead of error) when a field in the path is missing (aka the supplied path is wrong)
ReturnNilOnNilPtrFields bool // return nil (instead of error) when a field in the path is nil
ReturnNilOnWrongFinalFieldType bool // return nil (instead of error) when the (final) field is not of the requested generic type
ReturnNilOnWrongIntermedFieldType bool // return nil (instead of error) when the intermediate field has an invalid type
ReturnNilOnInvalidArrayIndizes bool // return nil (instead of error) when trying to access an array with an invalid index (not a number or out of range)
ReturnNilOnMissingMapKeys bool // return nil (instead of error) when trying to access a map with a missing key
UsedTagForKeys *string // Use this tag for key names in the struct (instead of the StructField.Name)
PreventArrayAccess bool // do not access array indices - throw an error instead
PreventMapAccess bool // do not access maps - throw an error instead
}
func AccessJSONStruct[TResult any](v any, path string) (TResult, error) {
return AccessStructByStringPath[TResult](v, path, AccessStructOpt{UsedTagForKeys: langext.Ptr("json")})
}
func AccessStruct[TResult any](v any, path string) (TResult, error) {
return AccessStructByStringPath[TResult](v, path, AccessStructOpt{})
}
func AccessStructByArrayPath[TResult any](v any, path []string, opts ...AccessStructOpt) (TResult, error) {
opt := AccessStructOpt{}
if len(opts) > 0 {
opt = opts[0]
}
resultVal, err := accessStructByPath(reflect.ValueOf(v), path, opt)
if err != nil {
return *new(TResult), err
}
if resultValCast, ok := resultVal.(TResult); ok {
return resultValCast, nil
} else if opt.ReturnNilOnWrongFinalFieldType {
return *new(TResult), nil
} else {
return *new(TResult), ErrAccessStructInvalidFieldType
}
}
func AccessStructByStringPath[TResult any](v any, path string, opts ...AccessStructOpt) (TResult, error) {
opt := AccessStructOpt{}
if len(opts) > 0 {
opt = opts[0]
}
arrpath := strings.Split(path, ".")
resultVal, err := accessStructByPath(reflect.ValueOf(v), arrpath, opt)
if err != nil {
return *new(TResult), err
}
if resultValCast, ok := resultVal.(TResult); ok {
return resultValCast, nil
} else if opt.ReturnNilOnWrongFinalFieldType {
return *new(TResult), nil
} else {
return *new(TResult), ErrAccessStructInvalidFieldType
}
}
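AccessStructByStringPath splits the dot-separated path and walks pointers, structs, slices and maps; AccessJSONStruct is the same walk keyed by json tags. A brief sketch (import path assumed to follow the goext module path):

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/reflectext"
)

type address struct {
	City string `json:"city"`
}

type customer struct {
	Name string   `json:"name"`
	Addr *address `json:"addr"`
	Tags []string `json:"tags"`
}

func main() {
	c := customer{Name: "Jane", Addr: &address{City: "Berlin"}, Tags: []string{"a", "b"}}

	// Path segments are json tag names here.
	city, err := reflectext.AccessJSONStruct[string](c, "addr.city")
	fmt.Println(city, err) // Berlin <nil>

	// Plain field names and slice indices via AccessStruct.
	tag, err := reflectext.AccessStruct[string](c, "Tags.1")
	fmt.Println(tag, err) // b <nil>
}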
func accessStructByPath(val reflect.Value, path []string, opt AccessStructOpt) (any, error) {
if len(path) == 0 {
return val.Interface(), nil
}
currPath := path[0]
if val.Kind() == reflect.Ptr {
if val.IsNil() {
if opt.ReturnNilOnNilPtrFields {
return nil, nil
} else {
return nil, ErrAccessStructFieldInPathWasNil
}
}
return accessStructByPath(val.Elem(), path, opt)
}
if val.Kind() == reflect.Array || val.Kind() == reflect.Slice {
if opt.PreventArrayAccess {
return nil, ErrAccessStructArrayAccess
}
if val.IsNil() {
if opt.ReturnNilOnNilPtrFields {
return nil, nil
} else {
return nil, ErrAccessStructFieldInPathWasNil
}
}
arrIdx, err := strconv.ParseInt(currPath, 10, 64)
if err != nil {
if opt.ReturnNilOnInvalidArrayIndizes {
return nil, nil
} else {
return nil, ErrAccessStructInvalidArrayIndex
}
}
if arrIdx < 0 || int(arrIdx) >= val.Len() {
if opt.ReturnNilOnInvalidArrayIndizes {
return nil, nil
} else {
return nil, ErrAccessStructInvalidArrayIndex
}
}
return accessStructByPath(val.Index(int(arrIdx)), path[1:], opt)
}
if val.Kind() == reflect.Map {
if opt.PreventMapAccess {
return nil, ErrAccessStructMapAccess
}
if val.IsNil() {
if opt.ReturnNilOnNilPtrFields {
return nil, nil
} else {
return nil, ErrAccessStructFieldInPathWasNil
}
}
mapval := val.MapIndex(reflect.ValueOf(currPath))
if !mapval.IsValid() || mapval.IsZero() {
if opt.ReturnNilOnMissingMapKeys {
return nil, nil
} else {
return nil, ErrAccessStructInvalidMapKey
}
}
return accessStructByPath(mapval, path[1:], opt)
}
if val.Kind() == reflect.Struct {
if opt.UsedTagForKeys != nil {
for i := 0; i < val.NumField(); i++ {
if val.Type().Field(i).Tag.Get(*opt.UsedTagForKeys) == currPath {
return accessStructByPath(val.Field(i), path[1:], opt)
}
}
if opt.ReturnNilOnMissingFields {
return nil, nil
} else {
return nil, ErrAccessStructMissingField
}
} else {
for i := 0; i < val.NumField(); i++ {
if val.Type().Field(i).Name == currPath {
return accessStructByPath(val.Field(i), path[1:], opt)
}
}
if opt.ReturnNilOnMissingFields {
return nil, nil
} else {
return nil, ErrAccessStructMissingField
}
}
}
if opt.ReturnNilOnWrongIntermedFieldType {
return nil, nil
} else {
return nil, ErrAccessStructMissingField
}
}

View File

@@ -0,0 +1,259 @@
package reflectext
import "testing"
type TestStruct struct {
Name string `json:"name"`
Age int `json:"age"`
}
func TestAccessStructByArrayPath_HappyPath(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
result, err := AccessStructByArrayPath[string](testStruct, []string{"Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "John" {
t.Errorf("Expected 'John', got '%s'", result)
}
}
func TestAccessStructByArrayPath_InvalidField(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[string](testStruct, []string{"Invalid"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
func TestAccessStructByStringPath_HappyPath(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
result, err := AccessStructByStringPath[string](testStruct, "Name")
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "John" {
t.Errorf("Expected 'John', got '%s'", result)
}
}
func TestAccessStructByStringPath_InvalidField(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByStringPath[string](testStruct, "Invalid")
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type RecursiveStruct struct {
Name string
Sub *RecursiveStruct
SubSlice []RecursiveStruct
}
func TestAccessStructByArrayPath_RecursiveStruct(t *testing.T) {
testStruct := RecursiveStruct{Name: "John", Sub: &RecursiveStruct{Name: "Jane"}}
result, err := AccessStructByArrayPath[string](*testStruct.Sub, []string{"Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
func TestAccessStructByArrayPath_RecursiveStructSlice(t *testing.T) {
testStruct := RecursiveStruct{Name: "John", SubSlice: []RecursiveStruct{{Name: "Jane"}}}
result, err := AccessStructByArrayPath[string](testStruct.SubSlice[0], []string{"Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
func TestAccessStructByArrayPath_WrongType(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[int](testStruct, []string{"Name"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
func TestAccessStructByArrayPath_InvalidPath(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[string](testStruct, []string{"Name", "Invalid"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type NestedStruct struct {
Name string
Sub *TestStruct
}
func TestAccessStructByStringPath_NestedStruct(t *testing.T) {
testStruct := NestedStruct{Name: "John", Sub: &TestStruct{Name: "Jane", Age: 30}}
result, err := AccessStructByStringPath[string](testStruct, "Sub.Name")
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
type DeepNestedStruct struct {
Name string
Sub *NestedStruct
}
func TestAccessStructByStringPath_DeepNestedStruct(t *testing.T) {
testStruct := DeepNestedStruct{Name: "John", Sub: &NestedStruct{Name: "Jane", Sub: &TestStruct{Name: "Doe", Age: 30}}}
result, err := AccessStructByStringPath[string](testStruct, "Sub.Sub.Name")
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Doe" {
t.Errorf("Expected 'Doe', got '%s'", result)
}
}
type MapStruct struct {
Name string
Age int
}
type TestStructWithMap struct {
MapField map[string]MapStruct
}
func TestAccessStructByArrayPath_MapField(t *testing.T) {
testStruct := TestStructWithMap{
MapField: map[string]MapStruct{
"key": {Name: "John", Age: 30},
},
}
result, err := AccessStructByArrayPath[string](testStruct, []string{"MapField", "key", "Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "John" {
t.Errorf("Expected 'John', got '%s'", result)
}
}
func TestAccessStructByArrayPath_InvalidMapKey(t *testing.T) {
testStruct := TestStructWithMap{
MapField: map[string]MapStruct{
"key": {Name: "John", Age: 30},
},
}
_, err := AccessStructByArrayPath[string](testStruct, []string{"MapField", "invalid", "Name"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type ArrayStruct struct {
Name string
Arr []TestStruct
}
func TestAccessStructByArrayPath_ArrayField(t *testing.T) {
testStruct := ArrayStruct{
Name: "John",
Arr: []TestStruct{{Name: "Jane", Age: 30}},
}
result, err := AccessStructByArrayPath[string](testStruct, []string{"Arr", "0", "Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
func TestAccessStructByArrayPath_InvalidArrayIndex(t *testing.T) {
testStruct := ArrayStruct{
Name: "John",
Arr: []TestStruct{{Name: "Jane", Age: 30}},
}
_, err := AccessStructByArrayPath[string](testStruct, []string{"Arr", "1", "Name"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type FunctionStruct struct {
Name string
Func func() string
}
func TestAccessStructByArrayPath_FunctionField(t *testing.T) {
testStruct := FunctionStruct{Name: "John", Func: func() string { return "Hello" }}
_, err := AccessStructByArrayPath[string](testStruct, []string{"Func"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
func TestAccessStructByArrayPath_NonExistentPath(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[string](testStruct, []string{"NonExistent"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type NestedStructWithTag struct {
Name string `json:"name"`
Sub *TestStruct `json:"sub"`
}
func TestAccessStructByArrayPath_UsedTagForKeys(t *testing.T) {
testStruct := NestedStructWithTag{Name: "John", Sub: &TestStruct{Name: "Jane", Age: 30}}
tag := "json"
result, err := AccessStructByArrayPath[string](testStruct, []string{"sub", "name"}, AccessStructOpt{UsedTagForKeys: &tag})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
func TestAccessStructByArrayPath_UsedTagForKeysInvalid(t *testing.T) {
testStruct := NestedStructWithTag{Name: "John", Sub: &TestStruct{Name: "Jane", Age: 30}}
tag := "json"
_, err := AccessStructByArrayPath[string](testStruct, []string{"sub", "invalid"}, AccessStructOpt{UsedTagForKeys: &tag})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type DifferentTypeStruct struct {
Name string
Age int
}
func TestAccessStructByArrayPath_DifferentType(t *testing.T) {
testStruct := DifferentTypeStruct{Name: "John", Age: 30}
result, err := AccessStructByArrayPath[any](testStruct, []string{"Age"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != 30 {
t.Errorf("Expected '30', got '%v'", result)
}
}
func TestAccessStructByArrayPath_DifferentTypeInvalid(t *testing.T) {
testStruct := DifferentTypeStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[any](testStruct, []string{"Invalid"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}

View File

@@ -7,6 +7,7 @@ import (
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"sync"
"time"
)
type DB interface {
@@ -57,89 +58,121 @@ func (db *database) AddListener(listener Listener) {
func (db *database) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Result, error) {
origsql := sqlstr
t0 := time.Now()
preMeta := PreExecMeta{Context: ctx, TransactionConstructorContext: nil}
for _, v := range db.lstr {
err := v.PreExec(ctx, nil, &sqlstr, &prep)
err := v.PreExec(ctx, nil, &sqlstr, &prep, preMeta)
if err != nil {
return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
}
t1 := time.Now()
res, err := db.db.NamedExecContext(ctx, sqlstr, prep)
postMeta := PostExecMeta{Context: ctx, TransactionConstructorContext: nil, Init: t0, Start: t1, End: time.Now()}
for _, v := range db.lstr {
v.PostExec(nil, origsql, sqlstr, prep)
v.PostExec(nil, origsql, sqlstr, prep, err, postMeta)
}
if err != nil {
return nil, exerr.Wrap(err, "Failed to [exec] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
return res, nil
}
func (db *database) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Rows, error) {
origsql := sqlstr
t0 := time.Now()
preMeta := PreQueryMeta{Context: ctx, TransactionConstructorContext: nil}
for _, v := range db.lstr {
err := v.PreQuery(ctx, nil, &sqlstr, &prep)
err := v.PreQuery(ctx, nil, &sqlstr, &prep, preMeta)
if err != nil {
return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
}
t1 := time.Now()
rows, err := sqlx.NamedQueryContext(ctx, db.db, sqlstr, prep)
postMeta := PostQueryMeta{Context: ctx, TransactionConstructorContext: nil, Init: t0, Start: t1, End: time.Now()}
for _, v := range db.lstr {
v.PostQuery(nil, origsql, sqlstr, prep)
v.PostQuery(nil, origsql, sqlstr, prep, err, postMeta)
}
if err != nil {
return nil, exerr.Wrap(err, "Failed to [query] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
return rows, nil
}
func (db *database) Ping(ctx context.Context) error {
t0 := time.Now()
preMeta := PrePingMeta{Context: ctx}
for _, v := range db.lstr {
err := v.PrePing(ctx)
err := v.PrePing(ctx, preMeta)
if err != nil {
return err
}
}
t1 := time.Now()
err := db.db.PingContext(ctx)
postMeta := PostPingMeta{Context: ctx, Init: t0, Start: t1, End: time.Now()}
for _, v := range db.lstr {
v.PostPing(err)
v.PostPing(err, postMeta)
}
if err != nil {
return exerr.Wrap(err, "Failed to [ping] sql database").Build()
}
return nil
}
func (db *database) BeginTransaction(ctx context.Context, iso sql.IsolationLevel) (Tx, error) {
t0 := time.Now()
db.lock.Lock()
txid := db.txctr
db.txctr += 1 // with overflow !
db.lock.Unlock()
preMeta := PreTxBeginMeta{Context: ctx}
for _, v := range db.lstr {
err := v.PreTxBegin(ctx, txid)
err := v.PreTxBegin(ctx, txid, preMeta)
if err != nil {
return nil, err
}
}
t1 := time.Now()
xtx, err := db.db.BeginTxx(ctx, &sql.TxOptions{Isolation: iso})
postMeta := PostTxBeginMeta{Context: ctx, Init: t0, Start: t1, End: time.Now()}
for _, v := range db.lstr {
v.PostTxBegin(txid, err, postMeta)
}
if err != nil {
return nil, exerr.Wrap(err, "Failed to start sql transaction").Build()
}
for _, v := range db.lstr {
v.PostTxBegin(txid, err)
}
return NewTransaction(xtx, txid, db), nil
return newTransaction(ctx, xtx, txid, db), nil
}
func (db *database) Exit() error {


@@ -1,188 +1,266 @@
package sq
import "context"
import (
"context"
"time"
)
type PrePingMeta struct {
Context context.Context
}
type PreTxBeginMeta struct {
Context context.Context
ConstructorContext context.Context
}
type PreTxCommitMeta struct {
ConstructorContext context.Context
}
type PreTxRollbackMeta struct {
ConstructorContext context.Context
}
type PreQueryMeta struct {
Context context.Context
TransactionConstructorContext context.Context
}
type PreExecMeta struct {
Context context.Context
TransactionConstructorContext context.Context
}
type PostPingMeta struct {
Context context.Context
Init time.Time
Start time.Time
End time.Time
}
type PostTxBeginMeta struct {
Context context.Context
Init time.Time
Start time.Time
End time.Time
}
type PostTxCommitMeta struct {
ConstructorContext context.Context
Init time.Time
Start time.Time
End time.Time
ExecCounter int
QueryCounter int
}
type PostTxRollbackMeta struct {
ConstructorContext context.Context
Init time.Time
Start time.Time
End time.Time
ExecCounter int
QueryCounter int
}
type PostQueryMeta struct {
Context context.Context
TransactionConstructorContext context.Context
Init time.Time
Start time.Time
End time.Time
}
type PostExecMeta struct {
Context context.Context
TransactionConstructorContext context.Context
Init time.Time
Start time.Time
End time.Time
}
type Listener interface {
PrePing(ctx context.Context) error
PreTxBegin(ctx context.Context, txid uint16) error
PreTxCommit(txid uint16) error
PreTxRollback(txid uint16) error
PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP) error
PreExec(ctx context.Context, txID *uint16, sql *string, params *PP) error
PrePing(ctx context.Context, meta PrePingMeta) error
PreTxBegin(ctx context.Context, txid uint16, meta PreTxBeginMeta) error
PreTxCommit(txid uint16, meta PreTxCommitMeta) error
PreTxRollback(txid uint16, meta PreTxRollbackMeta) error
PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error
PreExec(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error
PostPing(result error)
PostTxBegin(txid uint16, result error)
PostTxCommit(txid uint16, result error)
PostTxRollback(txid uint16, result error)
PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP)
PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP)
PostPing(result error, meta PostPingMeta)
PostTxBegin(txid uint16, result error, meta PostTxBeginMeta)
PostTxCommit(txid uint16, result error, meta PostTxCommitMeta)
PostTxRollback(txid uint16, result error, meta PostTxRollbackMeta)
PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta)
PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta)
}
type genListener struct {
prePing func(ctx context.Context) error
preTxBegin func(ctx context.Context, txid uint16) error
preTxCommit func(txid uint16) error
preTxRollback func(txid uint16) error
preQuery func(ctx context.Context, txID *uint16, sql *string, params *PP) error
preExec func(ctx context.Context, txID *uint16, sql *string, params *PP) error
postPing func(result error)
postTxBegin func(txid uint16, result error)
postTxCommit func(txid uint16, result error)
postTxRollback func(txid uint16, result error)
postQuery func(txID *uint16, sqlOriginal string, sqlReal string, params PP)
postExec func(txID *uint16, sqlOriginal string, sqlReal string, params PP)
prePing func(ctx context.Context, meta PrePingMeta) error
preTxBegin func(ctx context.Context, txid uint16, meta PreTxBeginMeta) error
preTxCommit func(txid uint16, meta PreTxCommitMeta) error
preTxRollback func(txid uint16, meta PreTxRollbackMeta) error
preQuery func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error
preExec func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error
postPing func(result error, meta PostPingMeta)
postTxBegin func(txid uint16, result error, meta PostTxBeginMeta)
postTxCommit func(txid uint16, result error, meta PostTxCommitMeta)
postTxRollback func(txid uint16, result error, meta PostTxRollbackMeta)
postQuery func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta)
postExec func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta)
}
func (g genListener) PrePing(ctx context.Context) error {
func (g genListener) PrePing(ctx context.Context, meta PrePingMeta) error {
if g.prePing != nil {
return g.prePing(ctx)
return g.prePing(ctx, meta)
} else {
return nil
}
}
func (g genListener) PreTxBegin(ctx context.Context, txid uint16) error {
func (g genListener) PreTxBegin(ctx context.Context, txid uint16, meta PreTxBeginMeta) error {
if g.preTxBegin != nil {
return g.preTxBegin(ctx, txid)
return g.preTxBegin(ctx, txid, meta)
} else {
return nil
}
}
func (g genListener) PreTxCommit(txid uint16) error {
func (g genListener) PreTxCommit(txid uint16, meta PreTxCommitMeta) error {
if g.preTxCommit != nil {
return g.preTxCommit(txid)
return g.preTxCommit(txid, meta)
} else {
return nil
}
}
func (g genListener) PreTxRollback(txid uint16) error {
func (g genListener) PreTxRollback(txid uint16, meta PreTxRollbackMeta) error {
if g.preTxRollback != nil {
return g.preTxRollback(txid)
return g.preTxRollback(txid, meta)
} else {
return nil
}
}
func (g genListener) PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP) error {
func (g genListener) PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error {
if g.preQuery != nil {
return g.preQuery(ctx, txID, sql, params)
return g.preQuery(ctx, txID, sql, params, meta)
} else {
return nil
}
}
func (g genListener) PreExec(ctx context.Context, txID *uint16, sql *string, params *PP) error {
func (g genListener) PreExec(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error {
if g.preExec != nil {
return g.preExec(ctx, txID, sql, params)
return g.preExec(ctx, txID, sql, params, meta)
} else {
return nil
}
}
func (g genListener) PostPing(result error) {
func (g genListener) PostPing(result error, meta PostPingMeta) {
if g.postPing != nil {
g.postPing(result)
g.postPing(result, meta)
}
}
func (g genListener) PostTxBegin(txid uint16, result error) {
func (g genListener) PostTxBegin(txid uint16, result error, meta PostTxBeginMeta) {
if g.postTxBegin != nil {
g.postTxBegin(txid, result)
g.postTxBegin(txid, result, meta)
}
}
func (g genListener) PostTxCommit(txid uint16, result error) {
func (g genListener) PostTxCommit(txid uint16, result error, meta PostTxCommitMeta) {
if g.postTxCommit != nil {
g.postTxCommit(txid, result)
g.postTxCommit(txid, result, meta)
}
}
func (g genListener) PostTxRollback(txid uint16, result error) {
func (g genListener) PostTxRollback(txid uint16, result error, meta PostTxRollbackMeta) {
if g.postTxRollback != nil {
g.postTxRollback(txid, result)
g.postTxRollback(txid, result, meta)
}
}
func (g genListener) PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP) {
func (g genListener) PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta) {
if g.postQuery != nil {
g.postQuery(txID, sqlOriginal, sqlReal, params)
g.postQuery(txID, sqlOriginal, sqlReal, params, result, meta)
}
}
func (g genListener) PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP) {
func (g genListener) PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta) {
if g.postExec != nil {
g.postExec(txID, sqlOriginal, sqlReal, params)
g.postExec(txID, sqlOriginal, sqlReal, params, result, meta)
}
}
func NewPrePingListener(f func(ctx context.Context) error) Listener {
func NewPrePingListener(f func(ctx context.Context, meta PrePingMeta) error) Listener {
return genListener{prePing: f}
}
func NewPreTxBeginListener(f func(ctx context.Context, txid uint16) error) Listener {
func NewPreTxBeginListener(f func(ctx context.Context, txid uint16, meta PreTxBeginMeta) error) Listener {
return genListener{preTxBegin: f}
}
func NewPreTxCommitListener(f func(txid uint16) error) Listener {
func NewPreTxCommitListener(f func(txid uint16, meta PreTxCommitMeta) error) Listener {
return genListener{preTxCommit: f}
}
func NewPreTxRollbackListener(f func(txid uint16) error) Listener {
func NewPreTxRollbackListener(f func(txid uint16, meta PreTxRollbackMeta) error) Listener {
return genListener{preTxRollback: f}
}
func NewPreQueryListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP) error) Listener {
func NewPreQueryListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error) Listener {
return genListener{preQuery: f}
}
func NewPreExecListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP) error) Listener {
func NewPreExecListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error) Listener {
return genListener{preExec: f}
}
func NewPreListener(f func(ctx context.Context, cmdtype string, txID *uint16, sql *string, params *PP) error) Listener {
return genListener{
preExec: func(ctx context.Context, txID *uint16, sql *string, params *PP) error {
preExec: func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error {
return f(ctx, "EXEC", txID, sql, params)
},
preQuery: func(ctx context.Context, txID *uint16, sql *string, params *PP) error {
preQuery: func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error {
return f(ctx, "QUERY", txID, sql, params)
},
}
}
func NewPostPingListener(f func(result error)) Listener {
func NewPostPingListener(f func(result error, meta PostPingMeta)) Listener {
return genListener{postPing: f}
}
func NewPostTxBeginListener(f func(txid uint16, result error)) Listener {
func NewPostTxBeginListener(f func(txid uint16, result error, meta PostTxBeginMeta)) Listener {
return genListener{postTxBegin: f}
}
func NewPostTxCommitListener(f func(txid uint16, result error)) Listener {
func NewPostTxCommitListener(f func(txid uint16, result error, meta PostTxCommitMeta)) Listener {
return genListener{postTxCommit: f}
}
func NewPostTxRollbackListener(f func(txid uint16, result error)) Listener {
func NewPostTxRollbackListener(f func(txid uint16, result error, meta PostTxRollbackMeta)) Listener {
return genListener{postTxRollback: f}
}
func NewPostQueryListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener {
func NewPostQueryListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta)) Listener {
return genListener{postQuery: f}
}
func NewPostExecListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener {
func NewPostExecListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta)) Listener {
return genListener{postExec: f}
}
func NewPostListener(f func(cmdtype string, txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener {
func NewPostListener(f func(cmdtype string, txID *uint16, sqlOriginal string, sqlReal string, result error, params PP)) Listener {
return genListener{
postExec: func(txID *uint16, sqlOriginal string, sqlReal string, params PP) {
f("EXEC", txID, sqlOriginal, sqlReal, params)
postExec: func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta) {
f("EXEC", txID, sqlOriginal, sqlReal, result, params)
},
postQuery: func(txID *uint16, sqlOriginal string, sqlReal string, params PP) {
f("QUERY", txID, sqlOriginal, sqlReal, params)
postQuery: func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta) {
f("QUERY", txID, sqlOriginal, sqlReal, result, params)
},
}
}
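A minimal sketch of a listener using the new meta-aware signatures (written as if inside package sq; db, the assumption that the DB value exposes AddListener, and the log output are illustrative and not part of this change):

// Sketch: log how long each Exec spent in the pre-exec listeners (Init..Start)
// and in the statement itself (Start..End), using the new PostExecMeta timestamps.
db.AddListener(NewPostExecListener(func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta) {
	log.Printf("exec took %s (pre-exec listeners: %s, err: %v)", meta.End.Sub(meta.Start), meta.Start.Sub(meta.Init), result)
}))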


@@ -6,6 +6,7 @@ import (
"github.com/jmoiron/sqlx"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"time"
)
type TxStatus string
@@ -26,62 +27,78 @@ type Tx interface {
}
type transaction struct {
tx *sqlx.Tx
id uint16
status TxStatus
execCtr int
queryCtr int
db *database
constructorContext context.Context
tx *sqlx.Tx
id uint16
status TxStatus
execCtr int
queryCtr int
db *database
}
func NewTransaction(xtx *sqlx.Tx, txid uint16, db *database) Tx {
func newTransaction(ctx context.Context, xtx *sqlx.Tx, txid uint16, db *database) Tx {
return &transaction{
tx: xtx,
id: txid,
status: TxStatusInitial,
execCtr: 0,
queryCtr: 0,
db: db,
constructorContext: ctx,
tx: xtx,
id: txid,
status: TxStatusInitial,
execCtr: 0,
queryCtr: 0,
db: db,
}
}
func (tx *transaction) Rollback() error {
t0 := time.Now()
preMeta := PreTxRollbackMeta{ConstructorContext: tx.constructorContext}
for _, v := range tx.db.lstr {
err := v.PreTxRollback(tx.id)
err := v.PreTxRollback(tx.id, preMeta)
if err != nil {
return exerr.Wrap(err, "failed to call SQL pre-rollback listener").Int("tx.id", int(tx.id)).Build()
}
}
t1 := time.Now()
result := tx.tx.Rollback()
if result == nil {
tx.status = TxStatusRollback
}
postMeta := PostTxRollbackMeta{ConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now(), ExecCounter: tx.execCtr, QueryCounter: tx.queryCtr}
for _, v := range tx.db.lstr {
v.PostTxRollback(tx.id, result)
v.PostTxRollback(tx.id, result, postMeta)
}
return result
}
func (tx *transaction) Commit() error {
t0 := time.Now()
preMeta := PreTxCommitMeta{ConstructorContext: tx.constructorContext}
for _, v := range tx.db.lstr {
err := v.PreTxCommit(tx.id)
err := v.PreTxCommit(tx.id, preMeta)
if err != nil {
return exerr.Wrap(err, "failed to call SQL pre-commit listener").Int("tx.id", int(tx.id)).Build()
}
}
t1 := time.Now()
result := tx.tx.Commit()
if result == nil {
tx.status = TxStatusComitted
}
postMeta := PostTxCommitMeta{ConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now(), ExecCounter: tx.execCtr, QueryCounter: tx.queryCtr}
for _, v := range tx.db.lstr {
v.PostTxRollback(tx.id, result)
v.PostTxCommit(tx.id, result, postMeta)
}
return result
@@ -89,21 +106,29 @@ func (tx *transaction) Commit() error {
func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Result, error) {
origsql := sqlstr
t0 := time.Now()
preMeta := PreExecMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext}
for _, v := range tx.db.lstr {
err := v.PreExec(ctx, langext.Ptr(tx.id), &sqlstr, &prep)
err := v.PreExec(ctx, langext.Ptr(tx.id), &sqlstr, &prep, preMeta)
if err != nil {
return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
}
t1 := time.Now()
res, err := tx.tx.NamedExecContext(ctx, sqlstr, prep)
tx.execCtr++
if tx.status == TxStatusInitial && err == nil {
tx.status = TxStatusActive
}
postMeta := PostExecMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now()}
for _, v := range tx.db.lstr {
v.PostExec(langext.Ptr(tx.id), origsql, sqlstr, prep)
v.PostExec(langext.Ptr(tx.id), origsql, sqlstr, prep, err, postMeta)
}
if err != nil {
@@ -114,21 +139,29 @@ func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Re
func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Rows, error) {
origsql := sqlstr
t0 := time.Now()
preMeta := PreQueryMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext}
for _, v := range tx.db.lstr {
err := v.PreQuery(ctx, langext.Ptr(tx.id), &sqlstr, &prep)
err := v.PreQuery(ctx, langext.Ptr(tx.id), &sqlstr, &prep, preMeta)
if err != nil {
return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
}
t1 := time.Now()
rows, err := sqlx.NamedQueryContext(ctx, tx.tx, sqlstr, prep)
tx.queryCtr++
if tx.status == TxStatusInitial && err == nil {
tx.status = TxStatusActive
}
postMeta := PostQueryMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now()}
for _, v := range tx.db.lstr {
v.PostQuery(langext.Ptr(tx.id), origsql, sqlstr, prep)
v.PostQuery(langext.Ptr(tx.id), origsql, sqlstr, prep, err, postMeta)
}
if err != nil {

timeext/diff.go (new file, 44 lines)

@@ -0,0 +1,44 @@
package timeext
import "time"
// YearDifference calculates the difference between two timestamps in (fractional) years.
// = t1 - t2
// Returns a float value; positive if t1 is after t2, negative otherwise.
func YearDifference(t1 time.Time, t2 time.Time, tz *time.Location) float64 {
yDelta := float64(t1.Year() - t2.Year())
processT1 := float64(t1.Sub(TimeToYearStart(t1, tz))) / float64(TimeToYearEnd(t1, tz).Sub(TimeToYearStart(t1, tz)))
processT2 := float64(t2.Sub(TimeToYearStart(t2, tz))) / float64(TimeToYearEnd(t2, tz).Sub(TimeToYearStart(t2, tz)))
return yDelta + (processT1 - processT2)
}
// MonthDifference calculates the difference between two timestamps in (fractional) months.
// = t1 - t2
// Returns a float value; positive if t1 is after t2, negative otherwise.
func MonthDifference(t1 time.Time, t2 time.Time) float64 {
yDelta := float64(t1.Year() - t2.Year())
mDelta := float64(t1.Month() - t2.Month())
dDelta := float64(0)
t1MonthDays := DaysInMonth(t1)
t2MonthDays := DaysInMonth(t2)
if t2.Year() > t1.Year() || (t2.Year() == t1.Year() && t2.Month() > t1.Month()) {
dDelta -= 1
dDelta += float64(t1MonthDays-t1.Day()) / float64(t1MonthDays)
dDelta += float64(t2.Day()) / float64(t2MonthDays)
} else if t2.Year() < t1.Year() || (t2.Year() == t1.Year() && t2.Month() < t1.Month()) {
dDelta -= 1
dDelta += float64(t1.Day()) / float64(t1MonthDays)
dDelta += float64(t2MonthDays-t2.Day()) / float64(t2MonthDays)
} else {
dDelta += float64(t1.Day()-t2.Day()) / float64(t1MonthDays)
}
return yDelta*12 + mDelta + dDelta
}
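For orientation, the sign convention of both helpers in one short sketch; the values match the tests below:

_ = YearDifference(time.Date(2020, 6, 1, 0, 0, 0, 0, time.UTC), time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC), time.UTC) // ≈ 0.4167 (about 5/12 of a year)
_ = MonthDifference(time.Date(2022, 3, 1, 0, 0, 0, 0, time.UTC), time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC))          // = 2.0; swapping the arguments flips the sign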

timeext/diff_test.go (new file, 143 lines)

@@ -0,0 +1,143 @@
package timeext
import (
"math"
"testing"
"time"
)
func TestYearDifferenceWithSameYearAndDay(t *testing.T) {
t1 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
expected := 0.0
result := YearDifference(t1, t2, time.UTC)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithOneYearApart(t *testing.T) {
t1 := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
expected := 1.0
result := YearDifference(t1, t2, time.UTC)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithDifferentMonths(t *testing.T) {
t1 := time.Date(2020, 6, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
expected := 0.4166666666666667 // Approximation of 5/12 months
result := YearDifference(t1, t2, time.UTC)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceAcrossYears(t *testing.T) {
t1 := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2020, 6, 1, 0, 0, 0, 0, time.UTC)
expected := 0.5833333333333334 // Approximation of 7/12 months
result := YearDifference(t1, t2, time.UTC)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithTimezone(t *testing.T) {
tz, _ := time.LoadLocation("America/New_York")
t1 := time.Date(2021, 1, 1, 0, 0, 0, 0, tz)
t2 := time.Date(2020, 6, 1, 0, 0, 0, 0, tz)
expected := 0.5833333333333334 // Same as UTC but ensuring timezone is considered
result := YearDifference(t1, t2, tz)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithNegativeDifference(t *testing.T) {
t1 := time.Date(2020, 1, 1, 0, 0, 0, 0, TimezoneBerlin)
t2 := time.Date(2021, 1, 1, 0, 0, 0, 0, TimezoneBerlin)
expected := -1.0
result := YearDifference(t1, t2, TimezoneBerlin)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithNegativeDifference2(t *testing.T) {
t1 := time.Date(2020, 7, 1, 0, 0, 0, 0, TimezoneBerlin)
t2 := time.Date(2021, 7, 1, 0, 0, 0, 0, TimezoneBerlin)
expected := -1.0
result := YearDifference(t1, t2, TimezoneBerlin)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func epsilonEquals(a, b float64) bool {
epsilon := 0.01
return math.Abs(a-b) < epsilon
}
func TestMonthDifferenceSameDate(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
expected := 0.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceSameMonth(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 1, 31, 0, 0, 0, 0, time.UTC)
expected := 0.967741935483871 // Approximation of 30/31 days
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceDifferentMonthsSameYear(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 3, 1, 0, 0, 0, 0, time.UTC)
expected := 2.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceDifferentYears(t *testing.T) {
t1 := time.Date(2021, 12, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 2, 1, 0, 0, 0, 0, time.UTC)
expected := 2.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceT1BeforeT2(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 6, 1, 0, 0, 0, 0, time.UTC)
expected := 5.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceT1AfterT2(t *testing.T) {
t1 := time.Date(2022, 6, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
expected := -5.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}


@@ -65,6 +65,10 @@ func TimeToYearEnd(t time.Time, tz *time.Location) time.Time {
return TimeToYearStart(t, tz).AddDate(1, 0, 0).Add(-1)
}
func TimeToNextYearStart(t time.Time, tz *time.Location) time.Time {
return TimeToYearStart(t, tz).AddDate(1, 0, 0)
}
// IsSameDayIncludingDateBoundaries returns true if t1 and t2 are part of the same day (in the supplied timezone).
// The day boundaries are inclusive, i.e. 2021-09-15T00:00:00 still counts as part of the day 2021-09-14.
func IsSameDayIncludingDateBoundaries(t1 time.Time, t2 time.Time, tz *time.Location) bool {
@@ -156,7 +160,7 @@ func SubtractYears(t time.Time, yearCount float64, tz *time.Location) time.Time
intCount, floatCount := math.Modf(yearCount)
t.AddDate(-int(intCount), 0, 0)
t = t.AddDate(-int(intCount), 0, 0)
t0 := TimeToYearStart(t, tz)
t1 := TimeToYearEnd(t, tz)
@@ -173,10 +177,17 @@ func AddYears(t time.Time, yearCount float64, tz *time.Location) time.Time {
intCount, floatCount := math.Modf(yearCount)
t.AddDate(int(intCount), 0, 0)
t = t.AddDate(int(intCount), 0, 0)
t0 := TimeToYearStart(t, tz)
t1 := TimeToYearEnd(t, tz)
return t.Add(time.Duration(float64(t1.Sub(t0)) * floatCount))
}
func DaysInMonth(t time.Time) int {
// https://stackoverflow.com/a/73882035/1761622
y, m, _ := t.Date()
return time.Date(y, m+1, 0, 0, 0, 0, 0, time.UTC).Day()
}

timeext/time_test.go (new file, 229 lines)

@@ -0,0 +1,229 @@
package timeext
import (
"testing"
"time"
)
func TestTimeToDayStart(t *testing.T) {
tz := TimezoneBerlin
tm := time.Date(2022, 1, 1, 13, 14, 15, 0, tz)
expected := time.Date(2022, 1, 1, 0, 0, 0, 0, tz)
result := TimeToDayStart(tm, tz)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestTimeToDayEnd(t *testing.T) {
tz := TimezoneBerlin
tm := time.Date(2022, 1, 1, 13, 14, 15, 0, tz)
expected := time.Date(2022, 1, 2, 0, 0, 0, 0, tz).Add(-1)
result := TimeToDayEnd(tm, tz)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestIsSameDayIncludingDateBoundaries(t *testing.T) {
tz := TimezoneBerlin
t1 := time.Date(2022, 1, 1, 23, 59, 59, 0, tz)
t2 := time.Date(2022, 1, 2, 0, 0, 0, 0, tz)
if !IsSameDayIncludingDateBoundaries(t1, t2, tz) {
t.Errorf("Expected %v and %v to be the same day", t1, t2)
}
}
func TestIsDatePartEqual(t *testing.T) {
tz := TimezoneBerlin
t1 := time.Date(2022, 1, 1, 23, 59, 59, 0, tz)
t2 := time.Date(2022, 1, 1, 0, 0, 0, 0, tz)
if !IsDatePartEqual(t1, t2, tz) {
t.Errorf("Expected %v and %v to have the same date part", t1, t2)
}
}
func TestWithTimePart(t *testing.T) {
tz := TimezoneBerlin
base := time.Date(2022, 1, 1, 0, 0, 0, 0, tz)
expected := time.Date(2022, 1, 1, 13, 14, 15, 0, tz)
result := WithTimePart(base, 13, 14, 15)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestCombineDateAndTime(t *testing.T) {
tz := TimezoneBerlin
d := time.Date(2022, 1, 1, 0, 0, 0, 0, tz)
tm := time.Date(0, 0, 0, 13, 14, 15, 0, tz)
expected := time.Date(2022, 1, 1, 13, 14, 15, 0, tz)
result := CombineDateAndTime(d, tm)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestIsSunday(t *testing.T) {
tz := TimezoneBerlin
tm := time.Date(2022, 1, 2, 0, 0, 0, 0, tz) // 2nd January 2022 is a Sunday
if !IsSunday(tm, tz) {
t.Errorf("Expected %v to be a Sunday", tm)
}
}
func TestIsSunday_OnSunday(t *testing.T) {
sunday := time.Date(2022, 5, 15, 0, 0, 0, 0, TimezoneBerlin) // A Sunday
if !IsSunday(sunday, TimezoneBerlin) {
t.Errorf("Expected true for Sunday")
}
}
func TestDurationFromTime(t *testing.T) {
expected := time.Duration(13*time.Hour + 14*time.Minute + 15*time.Second)
result := DurationFromTime(13, 14, 15)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestMin(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 1, 2, 0, 0, 0, 0, time.UTC)
expected := t1
result := Min(t1, t2)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestMax(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 1, 2, 0, 0, 0, 0, time.UTC)
expected := t2
result := Max(t1, t2)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestUnixFloatSeconds(t *testing.T) {
v := 1640995200.0 // 1st January 2022 00:00:00 UTC in Unix timestamp
expected := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
result := UnixFloatSeconds(v)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestFloorTime(t *testing.T) {
tm := time.Date(2022, 1, 1, 13, 14, 15, 0, time.UTC)
expected := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
result := FloorTime(tm)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestSubtractYears(t *testing.T) {
tz := TimezoneBerlin
tm := time.Date(2022, 1, 1, 0, 0, 0, 0, tz)
expected := time.Date(2021, 1, 1, 0, 0, 0, 0, tz)
result := SubtractYears(tm, 1, tz)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
expected = time.Date(2020, 1, 1, 0, 0, 0, 0, tz)
result = SubtractYears(tm, 2, tz)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
expected = time.Date(2019, 1, 1, 0, 0, 0, 0, tz)
result = SubtractYears(tm, 3, tz)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
expected = time.Date(2025, 1, 1, 0, 0, 0, 0, tz)
result = SubtractYears(tm, -3, tz)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestAddYears(t *testing.T) {
tz := TimezoneBerlin
tm := time.Date(2022, 1, 1, 0, 0, 0, 0, tz)
expected := time.Date(2023, 1, 1, 0, 0, 0, 0, tz)
result := AddYears(tm, 1, tz)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestIsDatePartEqual_SameDateDifferentTimes(t *testing.T) {
tz := time.UTC
t1 := time.Date(2022, 5, 18, 10, 30, 0, 0, tz)
t2 := time.Date(2022, 5, 18, 20, 45, 0, 0, tz)
if !IsDatePartEqual(t1, t2, tz) {
t.Errorf("Expected dates to be equal")
}
}
func TestWithTimePart_ChangeTime(t *testing.T) {
base := time.Date(2022, 5, 18, 0, 0, 0, 0, time.UTC)
result := WithTimePart(base, 15, 30, 45)
expected := time.Date(2022, 5, 18, 15, 30, 45, 0, time.UTC)
if !result.Equal(expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestCombineDateAndTime_CombineDifferentParts(t *testing.T) {
date := time.Date(2022, 5, 18, 0, 0, 0, 0, time.UTC)
timePart := time.Date(2000, 1, 1, 15, 30, 45, 0, time.UTC)
result := CombineDateAndTime(date, timePart)
expected := time.Date(2022, 5, 18, 15, 30, 45, 0, time.UTC)
if !result.Equal(expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestDaysInMonth_31Days(t *testing.T) {
date := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC) // January
expected := 31
result := DaysInMonth(date)
if result != expected {
t.Errorf("Expected %d but got %d", expected, result)
}
}
func TestDaysInMonth_30Days(t *testing.T) {
date := time.Date(2022, 4, 1, 0, 0, 0, 0, time.UTC) // April
expected := 30
result := DaysInMonth(date)
if result != expected {
t.Errorf("Expected %d but got %d", expected, result)
}
}
func TestDaysInMonth_FebruaryLeapYear(t *testing.T) {
date := time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC) // February in a leap year
expected := 29
result := DaysInMonth(date)
if result != expected {
t.Errorf("Expected %d but got %d", expected, result)
}
}
func TestDaysInMonth_FebruaryNonLeapYear(t *testing.T) {
date := time.Date(2021, 2, 1, 0, 0, 0, 0, time.UTC) // February in a non-leap year
expected := 28
result := DaysInMonth(date)
if result != expected {
t.Errorf("Expected %d but got %d", expected, result)
}
}


@@ -120,25 +120,25 @@ func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirecti
valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary)
if err != nil {
return ct.CursorToken{}, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build()
return nil, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build()
}
valueSeconary := ""
if fieldSecondary != nil && dirSecondary != nil {
valueSeconary, err = c.getFieldValueAsTokenString(lastEntity, *fieldSecondary)
if err != nil {
return ct.CursorToken{}, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build()
return nil, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build()
}
}
return ct.CursorToken{
Mode: ct.CTMNormal,
ValuePrimary: valuePrimary,
ValueSecondary: valueSeconary,
Direction: dirPrimary,
PageSize: langext.Coalesce(pageSize, 0),
Extra: ct.Extra{},
}, nil
return ct.NewKeySortToken(
valuePrimary,
valueSeconary,
dirPrimary,
dirPrimary,
langext.Coalesce(pageSize, 0),
ct.Extra{},
), nil
}
func (c *Coll[TData]) needsDoubleSort(ctx context.Context) bool {


@@ -19,6 +19,8 @@ func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, op
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
}
defer func() { _ = cursor.Close(ctx) }()
res, err := c.decodeAll(ctx, cursor)
if err != nil {
return nil, exerr.Wrap(err, "failed to decode values").Build()
@@ -38,6 +40,8 @@ func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeli
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
}
defer func() { _ = cursor.Close(ctx) }()
if cursor.Next(ctx) {
v, err := c.decodeSingle(ctx, cursor)
if err != nil {
@@ -60,6 +64,8 @@ func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline,
return *new(TData), exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
}
defer func() { _ = cursor.Close(ctx) }()
if cursor.Next(ctx) {
v, err := c.decodeSingle(ctx, cursor)
if err != nil {


@@ -64,6 +64,8 @@ func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
defer func() { _ = cursor.Close(ctx) }()
res, err := c.decodeAll(ctx, cursor)
if err != nil {
return nil, exerr.Wrap(err, "failed to decode values").Build()


@@ -12,7 +12,11 @@ import (
func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) {
r, err := c.findOneInternal(ctx, filter, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Build()
if filterId, ok := filter["_id"]; ok {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Any("filter", filter).Any("filter_id", filterId).Build()
} else {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Any("filter", filter).Build()
}
}
return *r, nil
@@ -21,7 +25,7 @@ func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error)
func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) {
r, err := c.findOneInternal(ctx, filter, true)
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Str("collection", c.Name()).Build()
return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Str("collection", c.Name()).Any("filter", filter).Build()
}
return r, nil
@@ -58,7 +62,11 @@ func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowN
return nil, nil
}
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one] failed").Any("filter", filter).Str("collection", c.Name()).NoLog().Build()
if filterId, ok := filter["_id"]; ok {
return nil, exerr.Wrap(err, "mongo-query[find-one|internal] failed").Str("collection", c.Name()).Any("filter", filter).Any("filter_id", filterId).NoLog().Build()
} else {
return nil, exerr.Wrap(err, "mongo-query[find-one|internal] failed").Str("collection", c.Name()).Any("filter", filter).NoLog().Build()
}
}
return &res, nil
@@ -80,6 +88,8 @@ func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowN
return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).NoLog().Build()
}
defer func() { _ = cursor.Close(ctx) }()
if cursor.Next(ctx) {
v, err := c.decodeSingle(ctx, cursor)
if err != nil {


@@ -10,6 +10,28 @@ import (
)
func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) {
if inTok == nil {
inTok = ct.Start()
}
if ctks, ok := inTok.(ct.CTKeySort); ok {
d, tok, err := c.listWithKSToken(ctx, filter, pageSize, ctks)
if err != nil {
return nil, ct.End(), err
}
return d, tok, nil
} else if ctks, ok := inTok.(ct.CTPaginated); ok {
d, tok, err := c.listWithPaginatedToken(ctx, filter, pageSize, ctks)
if err != nil {
return nil, ct.End(), err
}
return d, tok, nil
} else {
return nil, ct.End(), exerr.New(exerr.TypeCursorTokenDecode, "unknown ct type").Any("token", inTok).Type("tokenType", inTok).Build()
}
}
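A hedged sketch of how the two token kinds reach this dispatcher (written as if inside this package; MyData, coll, myFilter, the page size of 50 and the log call are placeholder assumptions, not part of this change):

func listSketch(ctx context.Context, coll *Coll[MyData], myFilter ct.Filter) error {
	// key-sort pagination: start with ct.Start() and feed the returned token back in
	page1, nextTok, err := coll.List(ctx, myFilter, langext.Ptr(50), ct.Start())
	if err != nil {
		return err
	}
	page2, _, err := coll.List(ctx, myFilter, langext.Ptr(50), nextTok)
	if err != nil {
		return err
	}
	// page/offset pagination: a ct.Page(n) token routes into listWithPaginatedToken instead
	page3, _, err := coll.List(ctx, myFilter, langext.Ptr(50), ct.Page(3))
	if err != nil {
		return err
	}
	log.Printf("fetched %d + %d + %d entities", len(page1), len(page2), len(page3))
	return nil
}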
func (c *Coll[TData]) listWithKSToken(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTKeySort) ([]TData, ct.CursorToken, error) {
if inTok.Mode == ct.CTMEnd {
return make([]TData, 0), ct.End(), nil
}
@@ -25,8 +47,8 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
pd2 := ct.SortASC
if filter != nil {
pipeline = filter.FilterQuery()
pf1, pd1, pf2, pd2 = filter.Pagination()
pipeline = filter.FilterQuery(ctx)
pf1, pd1, pf2, pd2 = filter.Pagination(ctx)
}
sortPrimary := pf1
@@ -41,7 +63,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
paginationPipeline, doubleSortPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
if err != nil {
return nil, ct.CursorToken{}, exerr.
return nil, nil, exerr.
Wrap(err, "failed to create pagination").
WithType(exerr.TypeCursorTokenDecode).
Str("collection", c.Name()).
@@ -66,14 +88,16 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
cursor, err := c.coll.Aggregate(ctx, pipeline)
if err != nil {
return nil, ct.CursorToken{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
return nil, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
defer func() { _ = cursor.Close(ctx) }()
// fast branch
if pageSize == nil {
entries, err := c.decodeAll(ctx, cursor)
if err != nil {
return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to all-decode entities").Build()
return nil, nil, exerr.Wrap(err, "failed to all-decode entities").Build()
}
return entries, ct.End(), nil
}
@@ -83,7 +107,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
var entry TData
entry, err = c.decodeSingle(ctx, cursor)
if err != nil {
return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to decode entity").Build()
return nil, nil, exerr.Wrap(err, "failed to decode entity").Build()
}
entities = append(entities, entry)
}
@@ -98,18 +122,76 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize)
if err != nil {
return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to create (out)-token").Build()
return nil, nil, exerr.Wrap(err, "failed to create (out)-token").Build()
}
return entities, nextToken, nil
}
func (c *Coll[TData]) listWithPaginatedToken(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTPaginated) ([]TData, ct.CursorToken, error) {
var err error
page := inTok.Page
if page < 0 {
page = 1
}
pipelineSort := mongo.Pipeline{}
pipelineFilter := mongo.Pipeline{}
if filter != nil {
pipelineFilter = filter.FilterQuery(ctx)
pf1, pd1, pf2, pd2 := filter.Pagination(ctx)
pipelineSort, err = createSortOnlyPipeline(pf1, pd1, &pf2, &pd2)
if err != nil {
return nil, nil, exerr.Wrap(err, "failed to create sort pipeline").Build()
}
}
pipelinePaginate := mongo.Pipeline{}
if pageSize != nil {
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$skip", Value: *pageSize * (page - 1)}})
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$limit", Value: *pageSize}})
} else {
page = 1
}
pipelineCount := mongo.Pipeline{}
pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}})
extrModPipelineResolved := mongo.Pipeline{}
for _, ppl := range c.extraModPipeline {
extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx))
}
pipelineList := langext.ArrConcat(pipelineFilter, pipelineSort, pipelinePaginate, extrModPipelineResolved, pipelineSort)
cursorList, err := c.coll.Aggregate(ctx, pipelineList)
if err != nil {
return nil, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build()
}
entities, err := c.decodeAll(ctx, cursorList)
if err != nil {
return nil, nil, exerr.Wrap(err, "failed to all-decode entities").Build()
}
tokOut := ct.Page(page + 1)
if pageSize == nil || len(entities) < *pageSize {
tokOut = ct.PageEnd()
}
return entities, tokOut, nil
}
func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, error) {
type countRes struct {
Count int64 `bson:"c"`
}
pipeline := filter.FilterQuery()
pipeline := filter.FilterQuery(ctx)
pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}})
@@ -118,6 +200,8 @@ func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, er
return 0, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
defer func() { _ = cursor.Close(ctx) }()
if cursor.Next(ctx) {
v := countRes{}
err = cursor.Decode(&v)
@@ -131,17 +215,44 @@ func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, er
}
func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, int64, error) {
// NOTE: Possible optimization: Cache count in CursorToken, then fetch count only on first page.
count, err := c.Count(ctx, filter)
if err != nil {
return nil, ct.CursorToken{}, 0, err
}
data, token, err := c.List(ctx, filter, pageSize, inTok)
if err != nil {
return nil, ct.CursorToken{}, 0, err
if pageSize != nil && *pageSize == 0 {
// fast track, we return an empty list and do not advance the cursor token
count, err := c.Count(ctx, filter)
if err != nil {
return nil, nil, 0, err
}
return make([]TData, 0), inTok, count, nil
} else if pageSize == nil && inTok.IsStart() {
// fast track, we simply return len(entries) for count (we query all anyway)
data, token, err := c.List(ctx, filter, pageSize, inTok)
if err != nil {
return nil, nil, 0, err
}
return data, token, int64(len(data)), nil
} else {
count, err := c.Count(ctx, filter)
if err != nil {
return nil, nil, 0, err
}
data, token, err := c.List(ctx, filter, pageSize, inTok)
if err != nil {
return nil, nil, 0, err
}
return data, token, count, nil
}
return data, token, count, nil
}
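The pageSize=0 fast path makes it cheap to fetch only the total count; a hedged sketch (coll, MyData and myFilter are placeholder assumptions, written as if inside this package):

func countSketch(ctx context.Context, coll *Coll[MyData], myFilter ct.Filter) (int64, error) {
	// pageSize = 0: only the count is computed and the input token is returned unchanged
	_, _, total, err := coll.ListWithCount(ctx, myFilter, langext.Ptr(0), ct.Start())
	return total, err
}

On the first page with pageSize == nil the method skips the extra $count aggregation entirely and derives the count from len(data).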
func (c *Coll[TData]) ListAllIDs(ctx context.Context, filter ct.RawFilter) ([]string, error) {
@@ -152,7 +263,7 @@ func (c *Coll[TData]) ListAllIDs(ctx context.Context, filter ct.RawFilter) ([]st
pipelineFilter := mongo.Pipeline{}
if filter != nil {
pipelineFilter = filter.FilterQuery()
pipelineFilter = filter.FilterQuery(ctx)
}
extrModPipelineResolved := mongo.Pipeline{}
@@ -180,7 +291,7 @@ func (c *Coll[TData]) ListAllIDs(ctx context.Context, filter ct.RawFilter) ([]st
return langext.ArrMap(res, func(v idObject) string { return v.ID }), nil
}
func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) {
func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CTKeySort, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) {
cond := bson.A{}
sort := bson.D{}
@@ -211,7 +322,7 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken
// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{"$or": bson.A{bson.M{fieldPrimary: valuePrimary}, bson.M{fieldPrimary: nil}, bson.M{fieldPrimary: bson.M{"$exists": false}}}},
bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}},
}})
@@ -221,7 +332,7 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken
// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{"$or": bson.A{bson.M{fieldPrimary: valuePrimary}, bson.M{fieldPrimary: nil}, bson.M{fieldPrimary: bson.M{"$exists": false}}}},
bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}},
}})
@@ -261,3 +372,33 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken
return pipeline, pipelineSort, nil
}
func createSortOnlyPipeline(fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection) ([]bson.D, error) {
sort := bson.D{}
if sortPrimary == ct.SortASC {
// sort ASC on the primary field
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if sortPrimary == ct.SortDESC {
// sort DESC on the primary field
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {
if *sortSecondary == ct.SortASC {
sort = append(sort, bson.E{Key: *fieldSecondary, Value: +1})
} else if *sortSecondary == ct.SortDESC {
sort = append(sort, bson.E{Key: *fieldSecondary, Value: -1})
}
}
pipelineSort := mongo.Pipeline{bson.D{{Key: "$sort", Value: sort}}}
return pipelineSort, nil
}


@@ -23,8 +23,8 @@ func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.MongoFilter, page
sort := bson.D{}
if filter != nil {
pipelineFilter = filter.FilterQuery()
sort = filter.Sort()
pipelineFilter = filter.FilterQuery(ctx)
sort = filter.Sort(ctx)
}
if len(sort) != 0 {
@@ -60,19 +60,27 @@ func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.MongoFilter, page
return nil, pag.Pagination{}, exerr.Wrap(err, "failed to all-decode entities").Build()
}
cursorTotalCount, err := c.coll.Aggregate(ctx, pipelineTotalCount)
if err != nil {
return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build()
}
var tcRes totalCountResult
if cursorTotalCount.Next(ctx) {
err = cursorTotalCount.Decode(&tcRes)
if err != nil {
return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode mongo-aggregation $count result").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build()
}
if limit == nil {
// optimization, limit==nil, so we query all entities anyway, just use the array length
tcRes.Count = len(entities)
} else {
tcRes.Count = 0 // no entries in DB
cursorTotalCount, err := c.coll.Aggregate(ctx, pipelineTotalCount)
if err != nil {
return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build()
}
if cursorTotalCount.Next(ctx) {
err = cursorTotalCount.Decode(&tcRes)
if err != nil {
return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode mongo-aggregation $count result").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build()
}
} else {
tcRes.Count = 0 // no entries in DB
}
}
paginationObj := pag.Pagination{

wpdf/.gitignore (new file, vendored, 1 line)

@@ -0,0 +1 @@
wpdf_test.pdf

wpdf/logo.png (new binary file, 11 KiB, not shown)

wpdf/utils.go (new file, 9 lines)

@@ -0,0 +1,9 @@
package wpdf
func hexToColor(c uint32) PDFColor {
return PDFColor{R: int((c >> 16) & 0xFF), G: int((c >> 8) & 0xFF), B: int((c >> 0) & 0xFF)}
}
func rgbToColor(r, g, b int) PDFColor {
return PDFColor{R: r, G: g, B: b}
}
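These helpers back the new *Hex color setters further down; as a quick illustration of the bit layout:

// hexToColor splits a 0xRRGGBB value into its channels:
// hexToColor(0x3366FF) == PDFColor{R: 51, G: 102, B: 255} == rgbToColor(51, 102, 255)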


@@ -3,6 +3,7 @@ package wpdf
import (
"bytes"
"github.com/jung-kurt/gofpdf"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type WPDFBuilder struct {
@@ -13,6 +14,7 @@ type WPDFBuilder struct {
fontName PDFFontFamily
fontStyle PDFFontStyle
fontSize float64
debug bool
}
type PDFMargins struct {
@@ -61,6 +63,19 @@ func (b *WPDFBuilder) SetMargins(v PDFMargins) {
func (b *WPDFBuilder) AddPage() {
b.b.AddPage()
if b.debug {
ml, mt, mr, mb := b.GetMargins()
pw, ph := b.GetPageSize()
b.Rect(pw-ml-mr, ph-mt-mb, RectOutline, NewPDFRectOpt().X(ml).Y(mt).LineWidth(0.25).DrawColor(0, 0, 128))
b.Rect(pw, mt, RectFill, NewPDFRectOpt().X(0).Y(0).FillColor(0, 0, 255).Alpha(0.2, BlendNormal))
b.Rect(ml, ph-mt-mb, RectFill, NewPDFRectOpt().X(0).Y(mt).FillColor(0, 0, 255).Alpha(0.2, BlendNormal))
b.Rect(mr, ph-mt-mb, RectFill, NewPDFRectOpt().X(pw-mr).Y(mt).FillColor(0, 0, 255).Alpha(0.2, BlendNormal))
b.Rect(pw, mb, RectFill, NewPDFRectOpt().X(0).Y(ph-mb).FillColor(0, 0, 255).Alpha(0.2, BlendNormal))
}
}
func (b *WPDFBuilder) SetTextColor(cr, cg, cb int) {
@@ -105,12 +120,38 @@ func (b *WPDFBuilder) SetFont(fontName PDFFontFamily, fontStyle PDFFontStyle, fo
b.cellHeight = b.b.PointConvert(fontSize)
}
func (b *WPDFBuilder) GetFontSize() float64 {
return b.fontSize
}
func (b *WPDFBuilder) GetFontFamily() PDFFontFamily {
return b.fontName
}
func (b *WPDFBuilder) GetFontStyle() PDFFontStyle {
return b.fontStyle
}
func (b *WPDFBuilder) SetCellSpacing(h float64) {
b.cellSpacing = h
}
func (b *WPDFBuilder) Ln(h float64) {
xBefore, yBefore := b.GetXY()
b.b.Ln(h)
yAfter := b.GetY()
if b.debug {
_, _, mr, _ := b.GetMargins()
pw, _ := b.GetPageSize()
b.Rect(pw-mr-xBefore, yAfter-yBefore, RectOutline, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).DrawColor(128, 128, 0).Alpha(0.5, BlendNormal))
b.Rect(pw-mr-xBefore, yAfter-yBefore, RectFill, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).FillColor(128, 128, 0).Alpha(0.1, BlendNormal))
b.Line(xBefore, yBefore, pw-mr, yAfter, NewPDFLineOpt().LineWidth(0.25).DrawColor(128, 128, 0))
}
}
func (b *WPDFBuilder) Build() ([]byte, error) {
@@ -126,6 +167,10 @@ func (b *WPDFBuilder) SetX(x float64) {
b.b.SetX(x)
}
func (b *WPDFBuilder) IncX(dx float64) {
b.b.SetX(b.b.GetX() + dx)
}
func (b *WPDFBuilder) GetX() float64 {
return b.b.GetX()
}
@@ -188,6 +233,48 @@ func (b *WPDFBuilder) GetWorkAreaWidth() float64 {
return b.GetPageWidth() - b.GetMarginLeft() - b.GetMarginRight()
}
func (b *WPDFBuilder) GetStringWidth(str string) float64 {
func (b *WPDFBuilder) SetAutoPageBreak(auto bool, margin float64) {
b.b.SetAutoPageBreak(auto, margin)
}
func (b *WPDFBuilder) SetFooterFunc(fnc func()) {
b.b.SetFooterFunc(fnc)
}
func (b *WPDFBuilder) PageNo() int {
return b.b.PageNo()
}
func (b *WPDFBuilder) Bookmark(txtStr string, level int, y float64) {
b.b.Bookmark(b.tr(txtStr), level, y)
}
func (b *WPDFBuilder) GetStringWidth(str string, opts ...PDFCellOpt) float64 {
var fontNameOverride *PDFFontFamily
var fontStyleOverride *PDFFontStyle
var fontSizeOverride *float64
for _, opt := range opts {
fontNameOverride = langext.CoalesceOpt(opt.fontNameOverride, fontNameOverride)
fontStyleOverride = langext.CoalesceOpt(opt.fontStyleOverride, fontStyleOverride)
fontSizeOverride = langext.CoalesceOpt(opt.fontSizeOverride, fontSizeOverride)
}
if fontNameOverride != nil || fontStyleOverride != nil || fontSizeOverride != nil {
oldFontName := b.fontName
oldFontStyle := b.fontStyle
oldFontSize := b.fontSize
newFontName := langext.Coalesce(fontNameOverride, oldFontName)
newFontStyle := langext.Coalesce(fontStyleOverride, oldFontStyle)
newFontSize := langext.Coalesce(fontSizeOverride, oldFontSize)
b.SetFont(newFontName, newFontStyle, newFontSize)
defer func() { b.SetFont(oldFontName, oldFontStyle, oldFontSize) }()
}
return b.b.GetStringWidth(str)
}
func (b *WPDFBuilder) Debug(v bool) {
b.debug = v
}


@@ -1,6 +1,9 @@
package wpdf
import "gogs.mikescher.com/BlackForestBytes/goext/langext"
import (
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type PDFCellOpt struct {
width *float64
@@ -14,9 +17,15 @@ type PDFCellOpt struct {
fontNameOverride *PDFFontFamily
fontStyleOverride *PDFFontStyle
fontSizeOverride *float64
alphaOverride *dataext.Tuple[float64, PDFBlendMode]
extraLn *float64
x *float64
autoWidth *bool
textColor *PDFColor
borderColor *PDFColor
fillColor *PDFColor
autoWidthPaddingX *float64
debug *bool
}
func NewPDFCellOpt() *PDFCellOpt {
@@ -110,12 +119,80 @@ func (opt *PDFCellOpt) AutoWidth() *PDFCellOpt {
return opt
}
func (opt *PDFCellOpt) AutoWidthPaddingX(v float64) *PDFCellOpt {
opt.autoWidthPaddingX = &v
return opt
}
func (opt *PDFCellOpt) TextColor(cr, cg, cb int) *PDFCellOpt {
opt.textColor = langext.Ptr(rgbToColor(cr, cg, cb))
return opt
}
func (opt *PDFCellOpt) TextColorHex(c uint32) *PDFCellOpt {
opt.textColor = langext.Ptr(hexToColor(c))
return opt
}
func (opt *PDFCellOpt) BorderColor(cr, cg, cb int) *PDFCellOpt {
opt.borderColor = langext.Ptr(rgbToColor(cr, cg, cb))
return opt
}
func (opt *PDFCellOpt) BorderColorHex(c uint32) *PDFCellOpt {
opt.borderColor = langext.Ptr(hexToColor(c))
return opt
}
func (opt *PDFCellOpt) FillColor(cr, cg, cb int) *PDFCellOpt {
opt.fillColor = langext.Ptr(rgbToColor(cr, cg, cb))
return opt
}
func (opt *PDFCellOpt) FillColorHex(c uint32) *PDFCellOpt {
opt.fillColor = langext.Ptr(hexToColor(c))
return opt
}
func (opt *PDFCellOpt) Alpha(alpha float64, blendMode PDFBlendMode) *PDFCellOpt {
opt.alphaOverride = &dataext.Tuple[float64, PDFBlendMode]{V1: alpha, V2: blendMode}
return opt
}
func (opt *PDFCellOpt) Debug(v bool) *PDFCellOpt {
opt.debug = &v
return opt
}
func (opt *PDFCellOpt) Copy() *PDFCellOpt {
c := *opt
return &c
}
func (opt *PDFCellOpt) ToMulti() *PDFMultiCellOpt {
return &PDFMultiCellOpt{
width: opt.width,
height: opt.height,
border: opt.border,
align: opt.align,
fill: opt.fill,
fontNameOverride: opt.fontNameOverride,
fontStyleOverride: opt.fontStyleOverride,
fontSizeOverride: opt.fontSizeOverride,
extraLn: opt.extraLn,
x: opt.x,
textColor: opt.textColor,
borderColor: opt.borderColor,
fillColor: opt.fillColor,
}
}
func (b *WPDFBuilder) Cell(txt string, opts ...*PDFCellOpt) {
txtTR := b.tr(txt)
width := float64(0)
height := b.cellHeight + b.cellSpacing
var height *float64 = nil
border := BorderNone
ln := BreakToNextLine
align := AlignLeft
@@ -125,13 +202,19 @@ func (b *WPDFBuilder) Cell(txt string, opts ...*PDFCellOpt) {
var fontNameOverride *PDFFontFamily
var fontStyleOverride *PDFFontStyle
var fontSizeOverride *float64
var alphaOverride *dataext.Tuple[float64, PDFBlendMode]
extraLn := float64(0)
var x *float64
autoWidth := false
var textColor *PDFColor
var borderColor *PDFColor
var fillColor *PDFColor
autoWidthPaddingX := float64(0)
debug := b.debug
for _, opt := range opts {
width = langext.Coalesce(opt.width, width)
height = langext.Coalesce(opt.height, height)
height = langext.CoalesceOpt(opt.height, height)
border = langext.Coalesce(opt.border, border)
ln = langext.Coalesce(opt.ln, ln)
align = langext.Coalesce(opt.align, align)
@@ -141,9 +224,15 @@ func (b *WPDFBuilder) Cell(txt string, opts ...*PDFCellOpt) {
fontNameOverride = langext.CoalesceOpt(opt.fontNameOverride, fontNameOverride)
fontStyleOverride = langext.CoalesceOpt(opt.fontStyleOverride, fontStyleOverride)
fontSizeOverride = langext.CoalesceOpt(opt.fontSizeOverride, fontSizeOverride)
alphaOverride = langext.CoalesceOpt(opt.alphaOverride, alphaOverride)
extraLn = langext.Coalesce(opt.extraLn, extraLn)
x = langext.CoalesceOpt(opt.x, x)
autoWidth = langext.Coalesce(opt.autoWidth, autoWidth)
textColor = langext.CoalesceOpt(opt.textColor, textColor)
borderColor = langext.CoalesceOpt(opt.borderColor, borderColor)
fillColor = langext.CoalesceOpt(opt.fillColor, fillColor)
autoWidthPaddingX = langext.Coalesce(opt.autoWidthPaddingX, autoWidthPaddingX)
debug = langext.Coalesce(opt.debug, debug)
}
if fontNameOverride != nil || fontStyleOverride != nil || fontSizeOverride != nil {
@@ -157,15 +246,56 @@ func (b *WPDFBuilder) Cell(txt string, opts ...*PDFCellOpt) {
defer func() { b.SetFont(oldFontName, oldFontStyle, oldFontSize) }()
}
if height == nil {
// (do this after SetFont, so that b.cellHeight already reflects any font-size override)
height = langext.Ptr(b.cellHeight + b.cellSpacing)
}
if textColor != nil {
oldColorR, oldColorG, oldColorB := b.b.GetTextColor()
b.SetTextColor(textColor.R, textColor.G, textColor.B)
defer func() { b.SetTextColor(oldColorR, oldColorG, oldColorB) }()
}
if borderColor != nil {
oldColorR, oldColorG, oldColorB := b.b.GetDrawColor()
b.SetDrawColor(borderColor.R, borderColor.G, borderColor.B)
defer func() { b.SetDrawColor(oldColorR, oldColorG, oldColorB) }()
}
if fillColor != nil {
oldColorR, oldColorG, oldColorB := b.b.GetFillColor()
b.SetFillColor(fillColor.R, fillColor.G, fillColor.B)
defer func() { b.SetFillColor(oldColorR, oldColorG, oldColorB) }()
}
if alphaOverride != nil {
oldA, oldBMS := b.b.GetAlpha()
b.b.SetAlpha(alphaOverride.V1, string(alphaOverride.V2))
defer func() { b.b.SetAlpha(oldA, oldBMS) }()
}
if x != nil {
b.b.SetX(*x)
}
if autoWidth {
width = b.b.GetStringWidth(txtTR)
width = b.GetStringWidth(txtTR, langext.ArrDeRef(opts)...) + autoWidthPaddingX
}
b.b.CellFormat(width, height, txtTR, string(border), int(ln), string(align), fill, link, linkStr)
xBefore, yBefore := b.b.GetXY()
b.b.CellFormat(width, *height, txtTR, string(border), int(ln), string(align), fill, link, linkStr)
if debug {
if ln == BreakToNextLine {
b.Rect(b.GetPageWidth()-xBefore-b.GetMarginRight(), *height, RectOutline, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).DrawColor(0, 128, 0))
} else if ln == BreakToRight {
b.Rect(b.GetX()-xBefore, *height, RectOutline, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).DrawColor(0, 128, 0))
} else if ln == BreakToBelow {
b.Rect(b.GetPageWidth()-xBefore-b.GetMarginRight(), *height, RectOutline, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).DrawColor(0, 128, 0))
}
}
if extraLn != 0 {
b.b.Ln(extraLn)
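
Taken together, the new per-cell overrides compose fluently. A minimal usage sketch, assuming an already-initialized *wpdf.WPDFBuilder `b` obtained from the package's existing constructor (not part of this diff):

	// Hedged usage sketch of the cell options added above; only the option
	// methods shown in this diff are used, everything else is assumed context.
	b.Cell("Invoice total",
		wpdf.NewPDFCellOpt().
			AutoWidth().
			AutoWidthPaddingX(2).           // extra horizontal padding on top of the measured text width
			TextColorHex(0x333333).         // per-cell text color, restored after the cell is drawn
			Alpha(0.8, wpdf.BlendMultiply). // temporary alpha/blend override around CellFormat
			Debug(true))                    // outlines the rendered cell area in green (0, 128, 0)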

View File

@@ -74,6 +74,35 @@ const (
RectFillOutline PDFRectStyle = "FD"
)
type PDFBlendMode string
const (
BlendNormal PDFBlendMode = "Normal"
BlendMultiply PDFBlendMode = "Multiply"
BlendScreen PDFBlendMode = "Screen"
BlendOverlay PDFBlendMode = "Overlay"
BlendDarken PDFBlendMode = "Darken"
BlendLighten PDFBlendMode = "Lighten"
BlendColorDodge PDFBlendMode = "ColorDodge"
BlendColorBurn PDFBlendMode = "ColorBurn"
BlendHardLight PDFBlendMode = "HardLight"
BlendSoftLight PDFBlendMode = "SoftLight"
BlendDifference PDFBlendMode = "Difference"
BlendExclusion PDFBlendMode = "Exclusion"
BlendHue PDFBlendMode = "Hue"
BlendSaturation PDFBlendMode = "Saturation"
BlendColor PDFBlendMode = "Color"
BlendLuminosity PDFBlendMode = "Luminosity"
)
type PDFLineCapStyle string
const (
CapButt PDFLineCapStyle = "butt"
CapRound PDFLineCapStyle = "round"
CapSquare PDFLineCapStyle = "square"
)
const (
BackgroundFill = true
BackgroundTransparent = false
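
For context, these blend-mode names are the transparency blend modes that gofpdf's SetAlpha understands, which is why PDFBlendMode is a plain string type. A small illustrative helper (hypothetical, not part of this diff) showing how such a tuple is applied and later restored, mirroring the alphaOverride handling in the Cell diff above:

	// Hypothetical helper, for illustration only; the real handling is inlined
	// in Cell/Image/Line. It applies an alpha override and returns a restore func.
	func applyAlphaOverride(fpdf *gofpdf.Fpdf, ov dataext.Tuple[float64, PDFBlendMode]) func() {
		oldAlpha, oldBlend := fpdf.GetAlpha()
		fpdf.SetAlpha(ov.V1, string(ov.V2))                 // blend-mode constant passed through as its string value
		return func() { fpdf.SetAlpha(oldAlpha, oldBlend) } // caller defers this to restore the previous state
	}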

View File

@@ -3,10 +3,12 @@ package wpdf
import (
"bytes"
"github.com/jung-kurt/gofpdf"
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/imageext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"image"
"image/color"
"image/draw"
"net/http"
)
@@ -64,7 +66,12 @@ func (b *WPDFBuilder) RegisterImage(bin []byte, opts ...*PDFImageRegisterOpt) *P
}
if imageType == "" {
ct := http.DetectContentType(bin[:512])
ct := ""
if len(bin) > 512 {
ct = http.DetectContentType(bin[:512])
} else {
ct = http.DetectContentType(bin)
}
switch ct {
case "image/jpg":
imageType = "JPG"
@@ -124,6 +131,8 @@ type PDFImageOpt struct {
compression *imageext.ImageCompresson
reEncodePixelPerMM *float64
crop *imageext.ImageCrop
alphaOverride *dataext.Tuple[float64, PDFBlendMode]
debug *bool
}
func NewPDFImageOpt() *PDFImageOpt {
@@ -150,6 +159,11 @@ func (opt *PDFImageOpt) Height(v float64) *PDFImageOpt {
return opt
}
func (opt *PDFImageOpt) Debug(v bool) *PDFImageOpt {
opt.debug = &v
return opt
}
func (opt *PDFImageOpt) Flow(v bool) *PDFImageOpt {
opt.flow = &v
return opt
@@ -211,6 +225,11 @@ func (opt *PDFImageOpt) Crop(cropX float64, cropY float64, cropWidth float64, cr
return opt
}
func (opt *PDFImageOpt) Alpha(alpha float64, blendMode PDFBlendMode) *PDFImageOpt {
opt.alphaOverride = &dataext.Tuple[float64, PDFBlendMode]{V1: alpha, V2: blendMode}
return opt
}
func (b *WPDFBuilder) Image(img *PDFImageRef, opts ...*PDFImageOpt) {
var err error
@@ -228,7 +247,9 @@ func (b *WPDFBuilder) Image(img *PDFImageRef, opts ...*PDFImageOpt) {
var imageFit *imageext.ImageFit = nil
var fillColor color.Color = color.Transparent
compression := imageext.CompressionPNGSpeed
debug := b.debug
var crop *imageext.ImageCrop = nil
var alphaOverride *dataext.Tuple[float64, PDFBlendMode]
for _, opt := range opts {
x = langext.Coalesce(opt.x, x)
@@ -246,10 +267,18 @@ func (b *WPDFBuilder) Image(img *PDFImageRef, opts ...*PDFImageOpt) {
compression = langext.Coalesce(opt.compression, compression)
reEncodePixelPerMM = langext.Coalesce(opt.reEncodePixelPerMM, reEncodePixelPerMM)
crop = langext.CoalesceOpt(opt.crop, crop)
debug = langext.Coalesce(opt.debug, debug)
alphaOverride = langext.CoalesceOpt(opt.alphaOverride, alphaOverride)
}
if flow {
y = b.GetY()
}
regName := img.Name
var subImageBounds *imageext.PercentageRectangle = nil
if imageFit != nil || fillColor != nil || crop != nil {
var dataimg image.Image
@@ -277,11 +306,21 @@ func (b *WPDFBuilder) Image(img *PDFImageRef, opts ...*PDFImageOpt) {
pxw := w * pdfPixelPerMillimeter
pxh := h * pdfPixelPerMillimeter
dataimg, err = imageext.ObjectFitImage(dataimg, pxw, pxh, *imageFit, fillColor)
var dataImgRect imageext.PercentageRectangle
dataimg, dataImgRect, err = imageext.ObjectFitImage(dataimg, pxw, pxh, *imageFit, fillColor)
if err != nil {
b.b.SetError(err)
return
}
subImageBounds = &dataImgRect
}
if dataimg.ColorModel() != color.RGBAModel && dataimg.ColorModel() != color.NRGBAModel {
// the image cannot be 16bpp or similar - otherwise fpdf errors out
dataImgRGBA := image.NewNRGBA(image.Rect(0, 0, dataimg.Bounds().Dx(), dataimg.Bounds().Dy()))
draw.Draw(dataImgRGBA, dataImgRGBA.Bounds(), dataimg, dataimg.Bounds().Min, draw.Src)
dataimg = dataImgRGBA
}
bfr, imgMime, err := imageext.EncodeImage(dataimg, compression)
@@ -305,6 +344,12 @@ func (b *WPDFBuilder) Image(img *PDFImageRef, opts ...*PDFImageOpt) {
}
if alphaOverride != nil {
oldA, oldBMS := b.b.GetAlpha()
b.b.SetAlpha(alphaOverride.V1, string(alphaOverride.V2))
defer func() { b.b.SetAlpha(oldA, oldBMS) }()
}
fpdfOpt := gofpdf.ImageOptions{
ImageType: imageType,
ReadDpi: readDpi,
@@ -312,4 +357,16 @@ func (b *WPDFBuilder) Image(img *PDFImageRef, opts ...*PDFImageOpt) {
}
b.b.ImageOptions(regName, x, y, w, h, flow, fpdfOpt, link, linkStr)
if debug {
b.Rect(w, h, RectOutline, NewPDFRectOpt().X(x).Y(y).LineWidth(0.25).DrawColor(255, 0, 0))
if subImageBounds != nil {
r := subImageBounds.Of(imageext.Rectangle{X: x, Y: y, W: w, H: h})
b.Rect(r.W, r.H, RectOutline, NewPDFRectOpt().X(r.X).Y(r.Y).LineWidth(0.25).DrawColor(255, 0, 0))
b.Rect(r.W, r.H, RectFill, NewPDFRectOpt().X(r.X).Y(r.Y).FillColor(255, 0, 0).Alpha(0.2, BlendNormal))
b.Line(r.X, r.Y, r.X+r.W, r.Y+r.H, NewPDFLineOpt().LineWidth(0.25).DrawColor(255, 0, 0))
b.Line(r.X+r.W, r.Y, r.X, r.Y+r.H, NewPDFLineOpt().LineWidth(0.25).DrawColor(255, 0, 0))
}
}
}
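
The image path gains the same Debug/Alpha switches, plus a red overlay for the fitted sub-image when an ImageFit is in play, and content-type sniffing no longer assumes at least 512 bytes of input. A hedged usage sketch, with builder `b` and the raw `pngBytes` slice assumed:

	// Hedged sketch of the new image options; RegisterImage/Image signatures as in this diff.
	ref := b.RegisterImage(pngBytes) // content type is now sniffed safely even for payloads < 512 bytes
	b.Image(ref,
		wpdf.NewPDFImageOpt().
			Height(40).
			Alpha(0.5, wpdf.BlendNormal). // temporary alpha/blend override around ImageOptions
			Debug(true))                  // red outline of the target box (and of the fitted sub-image, if any)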

wpdf/wpdfLine.go (new file, 96 lines)
View File

@@ -0,0 +1,96 @@
package wpdf
import (
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type PDFLineOpt struct {
lineWidth *float64
drawColor *PDFColor
alpha *dataext.Tuple[float64, PDFBlendMode]
capStyle *PDFLineCapStyle
debug *bool
}
func NewPDFLineOpt() *PDFLineOpt {
return &PDFLineOpt{}
}
func (opt *PDFLineOpt) LineWidth(v float64) *PDFLineOpt {
opt.lineWidth = &v
return opt
}
func (opt *PDFLineOpt) DrawColor(cr, cg, cb int) *PDFLineOpt {
opt.drawColor = langext.Ptr(rgbToColor(cr, cg, cb))
return opt
}
func (opt *PDFLineOpt) DrawColorHex(c uint32) *PDFLineOpt {
opt.drawColor = langext.Ptr(hexToColor(c))
return opt
}
func (opt *PDFLineOpt) Alpha(alpha float64, blendMode PDFBlendMode) *PDFLineOpt {
opt.alpha = &dataext.Tuple[float64, PDFBlendMode]{V1: alpha, V2: blendMode}
return opt
}
func (opt *PDFLineOpt) CapButt() *PDFLineOpt {
opt.capStyle = langext.Ptr(CapButt)
return opt
}
func (opt *PDFLineOpt) CapSquare() *PDFLineOpt {
opt.capStyle = langext.Ptr(CapSquare)
return opt
}
func (opt *PDFLineOpt) CapRound() *PDFLineOpt {
opt.capStyle = langext.Ptr(CapRound)
return opt
}
func (opt *PDFLineOpt) Debug(v bool) *PDFLineOpt {
opt.debug = &v
return opt
}
func (b *WPDFBuilder) Line(x1 float64, y1 float64, x2 float64, y2 float64, opts ...*PDFLineOpt) {
var lineWidth *float64
var drawColor *PDFColor
var alphaOverride *dataext.Tuple[float64, PDFBlendMode]
capStyle := CapButt
debug := b.debug
for _, opt := range opts {
lineWidth = langext.CoalesceOpt(opt.lineWidth, lineWidth)
drawColor = langext.CoalesceOpt(opt.drawColor, drawColor)
alphaOverride = langext.CoalesceOpt(opt.alpha, alphaOverride)
capStyle = langext.Coalesce(opt.capStyle, capStyle)
debug = langext.Coalesce(opt.debug, debug)
}
if lineWidth != nil {
old := b.GetLineWidth()
b.SetLineWidth(*lineWidth)
defer func() { b.SetLineWidth(old) }()
}
if drawColor != nil {
oldR, oldG, oldB := b.GetDrawColor()
b.SetDrawColor(drawColor.R, drawColor.G, drawColor.B)
defer func() { b.SetDrawColor(oldR, oldG, oldB) }()
}
if alphaOverride != nil {
oldA, oldBMS := b.b.GetAlpha()
b.b.SetAlpha(alphaOverride.V1, string(alphaOverride.V2))
defer func() { b.b.SetAlpha(oldA, oldBMS) }()
}
b.b.SetLineCapStyle(string(capStyle))
b.b.Line(x1, y1, x2, y2)
}
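
The new wpdf/wpdfLine.go follows the same option-struct pattern as Cell and Image. A minimal usage sketch (coordinates in page units, builder `b` assumed):

	// Hedged sketch of the new Line API; all option methods are defined in the file above.
	b.Line(20, 60, 190, 60,
		wpdf.NewPDFLineOpt().
			LineWidth(0.5).
			DrawColorHex(0x2E7D32).
			CapRound().                   // forwarded to gofpdf via SetLineCapStyle
			Alpha(0.7, wpdf.BlendNormal)) // temporary alpha/blend override around Line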

Some files were not shown because too many files have changed in this diff.