Compare commits

...

121 Commits

Author SHA1 Message Date
9b2028ab54 v0.0.541
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m3s
2024-11-05 14:38:42 +01:00
207fd331d5 v0.0.540 handle ct=nil same as ct=Start
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m25s
2024-10-28 14:35:05 +01:00
54b0d6701d v0.0.539
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m49s
2024-10-27 02:21:35 +02:00
fc2657179b v0.0.538
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m7s
2024-10-27 02:18:45 +02:00
d4894e31fe Merge branch 'cursortoken-paginated'
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-10-27 02:18:25 +02:00
0ddfaf666b v0.0.537 fix goext error print always showing error-type of highest-level error
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m12s
2024-10-26 23:38:13 +02:00
e154137105 Trying out paginated cursortoken variant [UNTESTED]
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m11s
2024-10-25 09:45:42 +02:00
9b9a79b4ad v0.0.536 revert bfcodegen changes
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m24s
2024-10-22 09:57:06 +02:00
5a8d7110e4 v0.0.535
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m52s
2024-10-22 09:41:59 +02:00
d47c84cd47 v0.0.534
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-10-22 09:40:53 +02:00
c571f3f888 v0.0.533 Made String() functions in bfcodegen nil-ptr safe
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m9s
2024-10-22 09:36:40 +02:00
e884ba6b89 v0.0.532
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m53s
2024-10-13 16:33:39 +02:00
1a8e31e5ef v0.0.531
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m58s
2024-10-13 16:10:55 +02:00
eccc0fe9e5 v0.0.530
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m31s
2024-10-09 11:15:26 +02:00
c8dec24a0d v0.0.529 handle PanicWrappedErr in exerr.FromError()
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m13s
2024-10-08 19:22:17 +02:00
b8cb989e54 v0.0.528
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m38s
2024-10-07 17:20:40 +02:00
ec672fbd49 v0.0.527
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-10-07 17:19:30 +02:00
cfb0b53fc7 v0.0.526
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m17s
2024-10-05 23:59:23 +02:00
a7389f44fa v0.0.525 upgrad go1.22 -> go1.23
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m4s
2024-10-05 02:45:20 +02:00
69f0fedd66 v0.0.524
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m22s
2024-10-05 01:41:10 +02:00
335ef4d8e8 v0.0.523 ringbuffer
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m23s
2024-10-05 01:28:46 +02:00
61801ff20d v0.0.522
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m53s
2024-10-05 01:12:00 +02:00
361dca5c85 v0.0.521 ctxext
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m56s
2024-10-05 01:06:36 +02:00
9f85a243e8 v0.0.520
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m7s
2024-10-05 01:02:25 +02:00
dc6cb274ee v0.0.519
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-10-05 00:58:15 +02:00
f6b47792a4 v0.0.518 Improve sq db-listener interface (breaking)
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m11s
2024-10-05 00:45:55 +02:00
295b3ef793 v0.0.517 add constructor funcs for tuples
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m21s
2024-10-02 11:31:34 +02:00
721c176337 v0.0.516
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m25s
2024-09-25 21:43:41 +02:00
ebba6545a3 v0.0.515
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m35s
2024-09-16 17:39:51 +02:00
19c7e22ced v0.0.514 fix mongo filter where the primary sort key is null in db (fallback to secondary)
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-09-16 17:39:18 +02:00
9f883b458f v0.0.513
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m55s
2024-09-16 15:27:32 +02:00
1f456c5134 v0.0.512
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m6s
2024-09-15 21:25:21 +02:00
d7fbef37db v0.0.511
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m10s
2024-09-15 18:22:07 +02:00
a1668b6e5a v0.0.510
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m24s
2024-09-13 18:06:49 +02:00
3a17edfaf0 v0.0.509
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 6m2s
2024-08-26 14:35:49 +02:00
3320a9c19d v0.0.508
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m25s
2024-08-25 17:36:20 +02:00
8dcd8a270a v0.0.507 fix jsonfilter:"-" not working
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 7m7s
2024-08-25 15:41:17 +02:00
03a9b276d8 v0.0.506 allow empty-string as value for enum
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 7m36s
2024-08-22 11:45:02 +02:00
9c8cde384f v0.0.505
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 6m17s
2024-08-08 15:57:05 +02:00
99b000ecf4 v0.0.504
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m53s
2024-08-07 19:44:45 +02:00
a173e30090 v0.0.503
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m52s
2024-08-07 19:37:38 +02:00
a3481a7d2d v0.0.502
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-08-07 19:35:23 +02:00
a8e6f98a89 v0.0.501
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-08-07 19:31:36 +02:00
ab805403b9 v0.0.500
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-08-07 19:30:38 +02:00
1e98d351ce v0.0.499
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m24s
2024-08-07 18:34:22 +02:00
c40bdc8e9e v0.0.498
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m13s
2024-08-07 17:26:35 +02:00
7204562879 v0.0.497
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 2m33s
2024-08-07 17:04:59 +02:00
741611a2e1 v0.0.496 wpdf fixes and wpdf test.go
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m58s
2024-08-07 15:34:06 +02:00
133aeb8374 v0.0.495
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m44s
2024-08-07 14:00:02 +02:00
b78a468632 v0.0.494 add tables to wpdf
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-08-07 13:57:29 +02:00
f1b4480e0f v0.0.493 fix panic in RegisterImage for very short images
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m22s
2024-08-07 09:22:37 +02:00
ffffe4bf24 v0.0.492
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m32s
2024-08-02 16:19:21 +02:00
413bf3c848 v0.0.491 small optimization in Paginate method
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 8m31s
2024-07-31 00:15:09 +02:00
646990b549 v0.0.490 documentation and extra-params in exerr
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m2s
2024-07-27 23:44:18 +02:00
e5818146a8 v0.0.489
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m54s
2024-07-23 14:21:03 +02:00
1310054121 v0.0.488 fix wpdf with 16bpp images
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m9s
2024-07-22 15:16:28 +02:00
49d423915c v0.0.487
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m54s
2024-07-18 17:45:56 +02:00
1962cb3c52 v0.0.486 add ginext -> CorsAllowHeader
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m51s
2024-07-18 17:29:18 +02:00
84f124dd4d v0.0.485
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m44s
2024-07-16 15:22:18 +02:00
ff8e066135 v0.0.484
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m58s
2024-07-16 15:16:56 +02:00
bc5c61e43d v0.0.483
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m37s
2024-07-16 15:08:37 +02:00
6ded615723 v0.0.482 mathext.Percentile
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m46s
2024-07-12 16:33:42 +02:00
abc8af525a v0.0.481
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m50s
2024-07-04 16:24:49 +02:00
19d943361b v0.0.480
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m20s
2024-07-02 11:32:22 +02:00
b464afae01 v0.0.479 AccessStruct
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m36s
2024-07-02 11:29:47 +02:00
56bc5e8285 v0.0.478
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m41s
2024-07-01 17:23:00 +02:00
cb95bb561c v0.0.477 add langext.StrWrap
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m4s
2024-06-29 15:36:39 +02:00
dff8941bd3 v0.0.476 properly close cursor in wmo
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m44s
2024-06-28 18:37:02 +02:00
78e1c33e30 v0.0.475 ArrGroupBy
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m36s
2024-06-16 17:14:21 +02:00
d2f2a0558a v0.0.474 Add ZeroLogger config field to exerr.Init to override used zerolog instance
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m27s
2024-06-14 23:18:58 +02:00
fc4bed4b9f v0.0.473 add ctx to wmo.FilterQuery|Sort|Pagination
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m23s
2024-06-14 17:24:59 +02:00
julian
94a7bf250d v0.0.472 changed gin engine initialization
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m34s
2024-06-14 14:56:41 +02:00
f6121a6961 v0.0.471 Revert "v0.0.470 Add GoextJsonMarshaller interface to call when marshalling json via gojson"
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m50s
2024-06-11 19:39:43 +02:00
7fc73f1e93 v0.0.470 Add GoextJsonMarshaller interface to call when marshalling json via gojson
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m51s
2024-06-11 19:34:48 +02:00
2504ef00a0 v0.0.469
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m26s
2024-06-11 12:10:49 +02:00
fc5803493c added DblPtrIfNotNil
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m31s
2024-06-05 17:53:57 +02:00
a9295bfabf added CoalesceDblPtr
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m37s
2024-06-05 15:10:31 +02:00
12fa53d848 v0.0.466 exerr.Wrap now inherits the Severity of the wrapped error
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m24s
2024-06-03 13:48:30 +02:00
d2bb362135 v0.0.465
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m39s
2024-06-03 09:39:57 +02:00
9dd81f6bd5 v0.0.464 improve ZeroLogErrTraces/ZeroLogAllTraces output for empty-message wrapped exerrs
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m36s
2024-06-01 02:40:48 +02:00
d2c04afcd5 v0.0.463 Fix SubtractYears
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 3m29s
2024-05-29 20:20:01 +02:00
62980e1489 v0.0.462
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m32s
2024-05-23 14:37:05 +02:00
59963adf74 v0.0.461
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m17s
2024-05-20 00:52:49 +02:00
194ea4ace5 v0.0.460
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m16s
2024-05-20 00:38:04 +02:00
73b80a66bc v0.0.459
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m16s
2024-05-20 00:20:31 +02:00
d8b2d01274 v0.0.458 revert 457 and fix ObjectFitImage
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m49s
2024-05-20 00:15:24 +02:00
bfa8457e95 v0.0.457 test
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m38s
2024-05-20 00:07:33 +02:00
70106733d9 v0.0.456
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m38s
2024-05-18 23:38:47 +02:00
ce7837b9ef v0.0.455 add proper json/bson marshalling to exerr [severity|type|category]
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m39s
2024-05-16 15:38:42 +02:00
d0d72167eb v0.0.454
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m17s
2024-05-14 15:10:27 +02:00
a55ee1a6ce v0.0.453
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m18s
2024-05-14 14:57:10 +02:00
dfc319573c v0.0.452
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m16s
2024-05-14 12:48:43 +02:00
246e555f3f v0.0.451 wpdf image processing
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-05-14 12:46:49 +02:00
c28bc086b2 v0.0.450 wpdf
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 3m33s
2024-05-14 11:52:56 +02:00
d44e971325 v0.0.449
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m15s
2024-05-12 16:51:52 +02:00
fe4cdc48af v0.0.448 wmo marshalHook
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 25s
2024-05-12 16:45:45 +02:00
631006a4e1 v0.0.447
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m18s
2024-05-10 21:33:01 +02:00
567ead8697 v0.0.446 syncMap.GetAndSetIfNotContains and CASMutex
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-05-10 21:31:36 +02:00
e4886b4a7d v0.0.445 added CtxData() and ExtendGinMeta/ExtendContextMeta to exerr
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m55s
2024-05-03 15:28:53 +02:00
dcb5d3d7cd v0.0.444 change gin values in exerr auto meta to not include dots in keys (fucks up mongo)
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 3m9s
2024-05-03 13:24:08 +02:00
15a639f85a v0.0.443 fix wmo.List with pageSize==0
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 3m9s
2024-05-03 11:56:29 +02:00
303bd04649 v0.0.442
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m0s
2024-04-29 17:24:10 +02:00
7bda674939 v0.0.441
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 2m29s
2024-04-29 17:19:55 +02:00
126d4fbd0b v0.0.440 improve exerr.toJson
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 2m54s
2024-04-29 16:03:58 +02:00
fed8bccaab v0.0.439
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m38s
2024-04-25 11:47:16 +02:00
47b6a6b508 v0.0.438
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m20s
2024-04-25 11:40:01 +02:00
764ce79a71 v0.0.437 properly handle $group in wmo extraModPipeline
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 3m23s
2024-04-23 16:12:17 +02:00
b876c64ba2 v0.0.436
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m56s
2024-04-18 14:09:26 +02:00
8d52b41f57 v0.0.435 add ConvertStructToMapOpt.MaxDepth
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m39s
2024-04-15 12:55:44 +02:00
f47e2a33fe v0.0.434
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m13s
2024-04-15 10:43:26 +02:00
9321938dad v0.0.433 fix exerr missing gindata when using ginext.Error and add config for Output logging to stderr
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m0s
2024-04-15 10:25:30 +02:00
3828d601a2 v0.0.432 better handling of unmarshall-able values in exerr meta
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m12s
2024-04-13 22:08:45 +02:00
2e713c808d v0.0.431
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m10s
2024-04-10 15:29:59 +02:00
6602f86b43 v0.0.430
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-04-10 15:27:41 +02:00
24d9f0fdc7 v0.0.429
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m23s
2024-04-08 16:33:44 +02:00
8446b2da22 v0.0.428
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-04-08 16:32:34 +02:00
758e5a67b5 v0.0.427
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m30s
2024-04-07 15:10:21 +02:00
678ddd7124 v0.0.426 fix JsonOpt
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m57s
2024-04-01 16:03:00 +02:00
36b71dfaf3 v0.0.425 ArrAppend
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m25s
2024-03-30 14:24:53 +01:00
9491b72b8d v0.0.424 timeext.SubtractYears
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m29s
2024-03-30 03:01:55 +01:00
6c4af4006b v0.0.423 fix createPaginationPipeline - different primary and secondary sort keys broke mongo ??!?? - it actually only sorted by the secondary condition (ignoring the primary?)
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m32s
2024-03-24 15:25:52 +01:00
103 changed files with 7517 additions and 1164 deletions

View File

@@ -8,7 +8,7 @@ This should not have any heavy dependencies (gin, mongo, etc) and add missing ba
Potentially needs `export GOPRIVATE="gogs.mikescher.com"`
### Packages:
## Packages:
| Name | Maintainer | Description |
|-------------|------------|---------------------------------------------------------------------------------------------------------------|
@@ -20,8 +20,9 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"`
| zipext | Mike | Utility for zip/gzip/tar etc |
| reflectext | Mike | Utility for golang reflection |
| fsext | Mike | Utility for filesystem access |
| ctxext | Mike | Utility for context.Context |
| | | |
| mongoext | Mike | Utility/Helper functions for mongodb |
| mongoext | Mike | Utility/Helper functions for mongodb (kinda abandoned) |
| cursortoken | Mike | MongoDB cursortoken implementation |
| pagination | Mike | Pagination implementation |
| | | |
@@ -42,4 +43,69 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"`
| wmo | Mike | Mongo Wrapper, wraps mongodb with a better interface |
| | | |
| scn | Mike | SimpleCloudNotifier |
| | | |
| | | |
## Usage:
### exerr
- see **mongoext/builder.go** for full info
Short summary:
- A better error package with metadata, listeners, API output and error traces
- Initialize with `exerr.Init()`
- *Never* return `err` directly, always use `exerr.Wrap(err, "...")` - add metadata where applicable
- At the end either `Print()`, `Fatal()` or `Output()` your error (print = stdout, fatal = panic, output = JSON API response)
- You can add listeners with `exerr.RegisterListener()`, e.g. to save the full errors to a database (a minimal sketch follows below)
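A minimal sketch of that flow, assuming `exerr.Init()` was already called at startup; the `User`/`findUser` names are placeholders, only the `exerr.Wrap(...).Str(...).Build()` chain comes from the package:
```go
package example

import (
	"context"
	"errors"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

// User and findUser are placeholders for this sketch.
type User struct{ ID string }

func findUser(ctx context.Context, id string) (*User, error) {
	return nil, errors.New("not found") // stand-in for a real lookup
}

func loadUser(ctx context.Context, userID string) (*User, error) {
	user, err := findUser(ctx, userID)
	if err != nil {
		// never return err directly - wrap it, attach metadata, then Build()
		return nil, exerr.Wrap(err, "failed to load user").Str("userId", userID).Build()
	}
	return user, nil
}
```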
### wmo
- A typed wrapper around the official mongo-go-driver
- Use `wmo.W[...](...)` to wrap the collections and type-ify them
- The new collections have all the usual methods, but typed
- They also have `List()` and `Paginate()` methods for paginated listings (either with a cursortoken or page/limit)
- Register additional hooks with `WithDecodeFunc`, `WithUnmarshalHook`, `WithMarshalHook`, `WithModifyingPipeline`, `WithModifyingPipelineFunc`
- List(), Paginate(), etc support filter interfaces
- Rule(s) of thumb:
  - filter the results in the filter interface
  - sort the results in the sort function of the filter interface
  - add joins (`$lookup`s) in `WithModifyingPipelineFunc`/`WithModifyingPipeline` (see the sketch after this list)
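A hedged sketch of wrapping a collection; the exact signature of `wmo.W` and the import path are assumptions based on the description above:
```go
package example

import (
	"go.mongodb.org/mongo-driver/mongo"

	"gogs.mikescher.com/BlackForestBytes/goext/wmo" // assumed import path
)

type Event struct {
	ID   string `bson:"_id"`
	Name string `bson:"name"`
}

func wrapEvents(db *mongo.Database) {
	// wmo.W type-ifies the raw collection; the typed wrapper then exposes the
	// usual CRUD methods plus List()/Paginate() as described above.
	events := wmo.W[Event](db.Collection("events"))
	_ = events
}
```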
#### ginext
- A wrapper around gin-gonic/gin
- create the gin engine with `ginext.NewEngine`
- Add routes with `engine.Routes()...`
- `.Use(..)` adds a middleware
- `.Group(..)` adds a group
- `.Get().Handle(..)` adds a handler
- Handlers return values (in contrast to plain gin) - the values implement the `ginext.HTTPResponse` interface
- Every handler starts with something like:
```go
func (handler Handler) CommunityMetricsValues(pctx ginext.PreContext) ginext.HTTPResponse {
	type communityURI struct {
		Version     string             `uri:"version"`
		CommunityID models.CommunityID `uri:"cid"`
	}
	type body struct {
		UserID  models.UserID  `json:"userID"`
		EventID models.EventID `json:"eventID"`
	}

	var u communityURI
	var b body
	ctx, gctx, httpErr := pctx.URI(&u).Body(&b).Start() // can have more unmarshallers, like header, form, etc
	if httpErr != nil {
		return *httpErr
	}
	defer ctx.Cancel()

	// do stuff
}
```
#### sq
- TODO (like mongoext for sqlite/sql databases)

View File

@@ -46,7 +46,7 @@ var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]
var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`))
var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-.]+"|[0-9]+))\s*(//(?P<comm>.*))?.*$`))
var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-.]*"|[0-9]+))\s*(//(?P<comm>.*))?.*$`))
var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))
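The only change is `+` → `*` inside the quoted value group of `rexEnumValueDef`, so an empty string is now accepted as an enum value. A hypothetical enum source file that the relaxed regex matches in full (the `Direction` type is illustrative):
```go
package models

type Direction string // @enum:type

const (
	DirectionNone Direction = ""     // empty value - only matched after the "+" -> "*" change
	DirectionUp   Direction = "up"   // matched by both regex versions
	DirectionDown Direction = "down" // matched by both regex versions
)
```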

27
ctxext/getter.go Normal file
View File

@@ -0,0 +1,27 @@
package ctxext

import "context"

func Value[T any](ctx context.Context, key any) (T, bool) {
	v := ctx.Value(key)
	if v == nil {
		return *new(T), false
	}
	if tv, ok := v.(T); !ok {
		return *new(T), false
	} else {
		return tv, true
	}
}

func ValueOrDefault[T any](ctx context.Context, key any, def T) T {
	v := ctx.Value(key)
	if v == nil {
		return def
	}
	if tv, ok := v.(T); !ok {
		return def
	} else {
		return tv
	}
}
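For reference, a small usage sketch of the two helpers above (the key type and values are illustrative; the import path is assumed from the repository layout):
```go
package main

import (
	"context"
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/ctxext" // assumed import path
)

type ctxKey string

func main() {
	ctx := context.WithValue(context.Background(), ctxKey("reqid"), "REQ-1")

	// typed lookup: ok is false if the key is missing or holds the wrong type
	if reqid, ok := ctxext.Value[string](ctx, ctxKey("reqid")); ok {
		fmt.Println("request id:", reqid)
	}

	// fallback variant
	uid := ctxext.ValueOrDefault[string](ctx, ctxKey("uid"), "anonymous")
	fmt.Println("user id:", uid) // prints "anonymous"
}
```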

View File

@@ -1,14 +1,15 @@
package cursortoken
import (
"context"
"go.mongodb.org/mongo-driver/mongo"
)
type RawFilter interface {
FilterQuery() mongo.Pipeline
FilterQuery(ctx context.Context) mongo.Pipeline
}
type Filter interface {
FilterQuery() mongo.Pipeline
Pagination() (string, SortDirection, string, SortDirection)
FilterQuery(ctx context.Context) mongo.Pipeline
Pagination(ctx context.Context) (string, SortDirection, string, SortDirection)
}

View File

@@ -3,12 +3,17 @@ package cursortoken
import (
"encoding/base32"
"encoding/json"
"errors"
"go.mongodb.org/mongo-driver/bson/primitive"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"strconv"
"strings"
"time"
)
type CursorToken interface {
Token() string
IsEnd() bool
}
type Mode string
const (
@@ -24,97 +29,6 @@ type Extra struct {
PageSize *int
}
type CursorToken struct {
Mode Mode
ValuePrimary string
ValueSecondary string
Direction SortDirection
DirectionSecondary SortDirection
PageSize int
Extra Extra
}
type cursorTokenSerialize struct {
ValuePrimary *string `json:"v1,omitempty"`
ValueSecondary *string `json:"v2,omitempty"`
Direction *SortDirection `json:"dir,omitempty"`
DirectionSecondary *SortDirection `json:"dir2,omitempty"`
PageSize *int `json:"size,omitempty"`
ExtraTimestamp *time.Time `json:"ts,omitempty"`
ExtraId *string `json:"id,omitempty"`
ExtraPage *int `json:"pg,omitempty"`
ExtraPageSize *int `json:"sz,omitempty"`
}
func Start() CursorToken {
return CursorToken{
Mode: CTMStart,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func End() CursorToken {
return CursorToken{
Mode: CTMEnd,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func (c *CursorToken) Token() string {
if c.Mode == CTMStart {
return "@start"
}
if c.Mode == CTMEnd {
return "@end"
}
// We kinda manually implement omitempty for the CursorToken here
// because omitempty does not work for time.Time and otherwise we would always
// get weird time values when decoding a token that initially didn't have an Timestamp set
// For this usecase we treat Unix=0 as an empty timestamp
sertok := cursorTokenSerialize{}
if c.ValuePrimary != "" {
sertok.ValuePrimary = &c.ValuePrimary
}
if c.ValueSecondary != "" {
sertok.ValueSecondary = &c.ValueSecondary
}
if c.Direction != "" {
sertok.Direction = &c.Direction
}
if c.DirectionSecondary != "" {
sertok.DirectionSecondary = &c.DirectionSecondary
}
if c.PageSize != 0 {
sertok.PageSize = &c.PageSize
}
sertok.ExtraTimestamp = c.Extra.Timestamp
sertok.ExtraId = c.Extra.Id
sertok.ExtraPage = c.Extra.Page
sertok.ExtraPageSize = c.Extra.PageSize
body, err := json.Marshal(sertok)
if err != nil {
panic(err)
}
return "tok_" + base32.StdEncoding.EncodeToString(body)
}
func Decode(tok string) (CursorToken, error) {
if tok == "" {
return Start(), nil
@@ -125,60 +39,56 @@ func Decode(tok string) (CursorToken, error) {
if strings.ToLower(tok) == "@end" {
return End(), nil
}
if !strings.HasPrefix(tok, "tok_") {
return CursorToken{}, errors.New("could not decode token, missing prefix")
if strings.ToLower(tok) == "$end" {
return PageEnd(), nil
}
if strings.HasPrefix(tok, "$") && len(tok) > 1 {
n, err := strconv.ParseInt(tok[1:], 10, 64)
if err != nil {
return nil, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).WithType(exerr.TypeCursorTokenDecode).Build()
}
return Page(int(n)), nil
}
body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):])
if err != nil {
return CursorToken{}, err
}
if strings.HasPrefix(tok, "tok_") {
var tokenDeserialize cursorTokenSerialize
err = json.Unmarshal(body, &tokenDeserialize)
if err != nil {
return CursorToken{}, err
}
body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):])
if err != nil {
return nil, err
}
token := CursorToken{Mode: CTMNormal}
var tokenDeserialize cursorTokenKeySortSerialize
err = json.Unmarshal(body, &tokenDeserialize)
if err != nil {
return nil, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).WithType(exerr.TypeCursorTokenDecode).Build()
}
if tokenDeserialize.ValuePrimary != nil {
token.ValuePrimary = *tokenDeserialize.ValuePrimary
}
if tokenDeserialize.ValueSecondary != nil {
token.ValueSecondary = *tokenDeserialize.ValueSecondary
}
if tokenDeserialize.Direction != nil {
token.Direction = *tokenDeserialize.Direction
}
if tokenDeserialize.DirectionSecondary != nil {
token.DirectionSecondary = *tokenDeserialize.DirectionSecondary
}
if tokenDeserialize.PageSize != nil {
token.PageSize = *tokenDeserialize.PageSize
}
token := CTKeySort{Mode: CTMNormal}
token.Extra.Timestamp = tokenDeserialize.ExtraTimestamp
token.Extra.Id = tokenDeserialize.ExtraId
token.Extra.Page = tokenDeserialize.ExtraPage
token.Extra.PageSize = tokenDeserialize.ExtraPageSize
if tokenDeserialize.ValuePrimary != nil {
token.ValuePrimary = *tokenDeserialize.ValuePrimary
}
if tokenDeserialize.ValueSecondary != nil {
token.ValueSecondary = *tokenDeserialize.ValueSecondary
}
if tokenDeserialize.Direction != nil {
token.Direction = *tokenDeserialize.Direction
}
if tokenDeserialize.DirectionSecondary != nil {
token.DirectionSecondary = *tokenDeserialize.DirectionSecondary
}
if tokenDeserialize.PageSize != nil {
token.PageSize = *tokenDeserialize.PageSize
}
return token, nil
}
token.Extra.Timestamp = tokenDeserialize.ExtraTimestamp
token.Extra.Id = tokenDeserialize.ExtraId
token.Extra.Page = tokenDeserialize.ExtraPage
token.Extra.PageSize = tokenDeserialize.ExtraPageSize
return token, nil
func (c *CursorToken) ValuePrimaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
}
}
func (c *CursorToken) ValueSecondaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
return nil, exerr.New(exerr.TypeCursorTokenDecode, "could not decode token, missing/unknown prefix").Str("token", tok).Build()
}
}

132
cursortoken/tokenKeySort.go Normal file
View File

@@ -0,0 +1,132 @@
package cursortoken
import (
"encoding/base32"
"encoding/json"
"go.mongodb.org/mongo-driver/bson/primitive"
"time"
)
type CTKeySort struct {
Mode Mode
ValuePrimary string
ValueSecondary string
Direction SortDirection
DirectionSecondary SortDirection
PageSize int
Extra Extra
}
type cursorTokenKeySortSerialize struct {
ValuePrimary *string `json:"v1,omitempty"`
ValueSecondary *string `json:"v2,omitempty"`
Direction *SortDirection `json:"dir,omitempty"`
DirectionSecondary *SortDirection `json:"dir2,omitempty"`
PageSize *int `json:"size,omitempty"`
ExtraTimestamp *time.Time `json:"ts,omitempty"`
ExtraId *string `json:"id,omitempty"`
ExtraPage *int `json:"pg,omitempty"`
ExtraPageSize *int `json:"sz,omitempty"`
}
func NewKeySortToken(valuePrimary string, valueSecondary string, direction SortDirection, directionSecondary SortDirection, pageSize int, extra Extra) CursorToken {
return CTKeySort{
Mode: CTMNormal,
ValuePrimary: valuePrimary,
ValueSecondary: valueSecondary,
Direction: direction,
DirectionSecondary: directionSecondary,
PageSize: pageSize,
Extra: extra,
}
}
func Start() CursorToken {
return CTKeySort{
Mode: CTMStart,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func End() CursorToken {
return CTKeySort{
Mode: CTMEnd,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func (c CTKeySort) Token() string {
if c.Mode == CTMStart {
return "@start"
}
if c.Mode == CTMEnd {
return "@end"
}
// We kinda manually implement omitempty for the CursorToken here
// because omitempty does not work for time.Time and otherwise we would always
// get weird time values when decoding a token that initially didn't have a Timestamp set
// For this usecase we treat Unix=0 as an empty timestamp
sertok := cursorTokenKeySortSerialize{}
if c.ValuePrimary != "" {
sertok.ValuePrimary = &c.ValuePrimary
}
if c.ValueSecondary != "" {
sertok.ValueSecondary = &c.ValueSecondary
}
if c.Direction != "" {
sertok.Direction = &c.Direction
}
if c.DirectionSecondary != "" {
sertok.DirectionSecondary = &c.DirectionSecondary
}
if c.PageSize != 0 {
sertok.PageSize = &c.PageSize
}
sertok.ExtraTimestamp = c.Extra.Timestamp
sertok.ExtraId = c.Extra.Id
sertok.ExtraPage = c.Extra.Page
sertok.ExtraPageSize = c.Extra.PageSize
body, err := json.Marshal(sertok)
if err != nil {
panic(err)
}
return "tok_" + base32.StdEncoding.EncodeToString(body)
}
func (c CTKeySort) IsEnd() bool {
return c.Mode == CTMEnd
}
func (c CTKeySort) valuePrimaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
}
}
func (c CTKeySort) valueSecondaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
}
}

View File

@@ -0,0 +1,37 @@
package cursortoken

import "strconv"

type CTPaginated struct {
	Mode Mode
	Page int
}

func Page(p int) CursorToken {
	return CTPaginated{
		Mode: CTMNormal,
		Page: p,
	}
}

func PageEnd() CursorToken {
	return CTPaginated{
		Mode: CTMEnd,
		Page: 0,
	}
}

func (c CTPaginated) Token() string {
	if c.Mode == CTMStart {
		return "$1"
	}
	if c.Mode == CTMEnd {
		return "$end"
	}
	return "$" + strconv.Itoa(c.Page)
}

func (c CTPaginated) IsEnd() bool {
	return c.Mode == CTMEnd
}
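Taken together with the updated `Decode` above, the token prefix decides which implementation comes back; a short sketch (the import path is assumed):
```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/cursortoken" // assumed import path
)

func main() {
	fmt.Println(cursortoken.Page(3).Token())   // "$3"
	fmt.Println(cursortoken.PageEnd().Token()) // "$end"
	fmt.Println(cursortoken.Start().Token())   // "@start"

	// Decode dispatches on the prefix: "", "@start", "@end", "$end", "$<n>" and "tok_..."
	tok, err := cursortoken.Decode("$3")
	if err != nil {
		panic(err)
	}
	fmt.Println(tok.IsEnd()) // false - a CTPaginated pointing at page 3
}
```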

254
dataext/casMutex.go Normal file
View File

@@ -0,0 +1,254 @@
package dataext
import (
"context"
"golang.org/x/sync/semaphore"
"runtime"
"sync"
"sync/atomic"
"time"
"unsafe"
)
// from https://github.com/viney-shih/go-lock/blob/2f19fd8ce335e33e0ab9dccb1ff2ce820c3da332/cas.go
// CASMutex is the struct implementing RWMutex with CAS mechanism.
type CASMutex struct {
state casState
turnstile *semaphore.Weighted
broadcastChan chan struct{}
broadcastMut sync.RWMutex
}
func NewCASMutex() *CASMutex {
return &CASMutex{
state: casStateNoLock,
turnstile: semaphore.NewWeighted(1),
broadcastChan: make(chan struct{}),
}
}
type casState int32
const (
casStateUndefined casState = iota - 2 // -2
casStateWriteLock // -1
casStateNoLock // 0
casStateReadLock // >= 1
)
func (m *CASMutex) getState(n int32) casState {
switch st := casState(n); {
case st == casStateWriteLock:
fallthrough
case st == casStateNoLock:
return st
case st >= casStateReadLock:
return casStateReadLock
default:
// actually, it should not happen.
return casStateUndefined
}
}
func (m *CASMutex) listen() <-chan struct{} {
m.broadcastMut.RLock()
defer m.broadcastMut.RUnlock()
return m.broadcastChan
}
func (m *CASMutex) broadcast() {
newCh := make(chan struct{})
m.broadcastMut.Lock()
ch := m.broadcastChan
m.broadcastChan = newCh
m.broadcastMut.Unlock()
close(ch)
}
func (m *CASMutex) tryLock(ctx context.Context) bool {
for {
broker := m.listen()
if atomic.CompareAndSwapInt32(
(*int32)(unsafe.Pointer(&m.state)),
int32(casStateNoLock),
int32(casStateWriteLock),
) {
return true
}
if ctx == nil {
return false
}
select {
case <-ctx.Done():
// timeout or cancellation
return false
case <-broker:
// waiting for signal triggered by m.broadcast() and trying again.
}
}
}
// TryLockWithContext attempts to acquire the lock, blocking until resources
// are available or ctx is done (timeout or cancellation).
func (m *CASMutex) TryLockWithContext(ctx context.Context) bool {
if err := m.turnstile.Acquire(ctx, 1); err != nil {
// Acquire failed due to timeout or cancellation
return false
}
defer m.turnstile.Release(1)
return m.tryLock(ctx)
}
// Lock acquires the lock.
// If it is currently held by others, Lock will wait until it has a chance to acquire it.
func (m *CASMutex) Lock() {
ctx := context.Background()
m.TryLockWithContext(ctx)
}
// TryLock attempts to acquire the lock without blocking.
// Return false if someone is holding it now.
func (m *CASMutex) TryLock() bool {
if !m.turnstile.TryAcquire(1) {
return false
}
defer m.turnstile.Release(1)
return m.tryLock(nil)
}
// TryLockWithTimeout attempts to acquire the lock within a period of time.
// Return false if spending time is more than duration and no chance to acquire it.
func (m *CASMutex) TryLockWithTimeout(duration time.Duration) bool {
ctx, cancel := context.WithTimeout(context.Background(), duration)
defer cancel()
return m.TryLockWithContext(ctx)
}
// Unlock releases the lock.
func (m *CASMutex) Unlock() {
if ok := atomic.CompareAndSwapInt32(
(*int32)(unsafe.Pointer(&m.state)),
int32(casStateWriteLock),
int32(casStateNoLock),
); !ok {
panic("Unlock failed")
}
m.broadcast()
}
func (m *CASMutex) rTryLock(ctx context.Context) bool {
for {
broker := m.listen()
n := atomic.LoadInt32((*int32)(unsafe.Pointer(&m.state)))
st := m.getState(n)
switch st {
case casStateNoLock, casStateReadLock:
if atomic.CompareAndSwapInt32((*int32)(unsafe.Pointer(&m.state)), n, n+1) {
return true
}
}
if ctx == nil {
return false
}
select {
case <-ctx.Done():
// timeout or cancellation
return false
default:
switch st {
// read-lock failed due to concurrence issue, try again immediately
case casStateNoLock, casStateReadLock:
runtime.Gosched() // allow other goroutines to do stuff.
continue
}
}
select {
case <-ctx.Done():
// timeout or cancellation
return false
case <-broker:
// waiting for signal triggered by m.broadcast() and trying again.
}
}
}
// RTryLockWithContext attempts to acquire the read lock, blocking until resources
// are available or ctx is done (timeout or cancellation).
func (m *CASMutex) RTryLockWithContext(ctx context.Context) bool {
if err := m.turnstile.Acquire(ctx, 1); err != nil {
// Acquire failed due to timeout or cancellation
return false
}
m.turnstile.Release(1)
return m.rTryLock(ctx)
}
// RLock acquires the read lock.
// If it is currently held by others writing, RLock will wait until it has a chance to acquire it.
func (m *CASMutex) RLock() {
ctx := context.Background()
m.RTryLockWithContext(ctx)
}
// RTryLock attempts to acquire the read lock without blocking.
// Return false if someone is writing it now.
func (m *CASMutex) RTryLock() bool {
if !m.turnstile.TryAcquire(1) {
return false
}
m.turnstile.Release(1)
return m.rTryLock(nil)
}
// RTryLockWithTimeout attempts to acquire the read lock within a period of time.
// Return false if spending time is more than duration and no chance to acquire it.
func (m *CASMutex) RTryLockWithTimeout(duration time.Duration) bool {
ctx, cancel := context.WithTimeout(context.Background(), duration)
defer cancel()
return m.RTryLockWithContext(ctx)
}
// RUnlock releases the read lock.
func (m *CASMutex) RUnlock() {
n := atomic.AddInt32((*int32)(unsafe.Pointer(&m.state)), -1)
switch m.getState(n) {
case casStateUndefined, casStateWriteLock:
panic("RUnlock failed")
case casStateNoLock:
m.broadcast()
}
}
// RLocker returns a Locker interface that implements the Lock and Unlock methods
// by calling CASMutex.RLock and CASMutex.RUnlock.
func (m *CASMutex) RLocker() sync.Locker {
return (*rlocker)(m)
}
type rlocker CASMutex
func (r *rlocker) Lock() { (*CASMutex)(r).RLock() }
func (r *rlocker) Unlock() { (*CASMutex)(r).RUnlock() }
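A small usage sketch of the mutex above, showing the blocking, non-blocking and timeout-based acquisition paths (the import path is assumed):
```go
package main

import (
	"fmt"
	"time"

	"gogs.mikescher.com/BlackForestBytes/goext/dataext" // assumed import path
)

func main() {
	mut := dataext.NewCASMutex()

	// blocking write lock
	mut.Lock()
	fmt.Println(mut.TryLock()) // false - the write lock is already held
	mut.Unlock()

	// timeout-based acquisition
	if mut.TryLockWithTimeout(50 * time.Millisecond) {
		// ... critical section ...
		mut.Unlock()
	}

	// read locks can be held by multiple readers at once
	mut.RLock()
	fmt.Println(mut.RTryLock()) // true - a second reader gets in as well
	mut.RUnlock()
	mut.RUnlock()
}
```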

View File

@@ -25,6 +25,7 @@ func (m *JsonOpt[T]) UnmarshalJSON(data []byte) error {
return errors.New("JsonOpt: UnmarshalJSON on nil pointer")
}
m.isSet = true
return json.Unmarshal(data, &m.value)
}
@@ -56,3 +57,11 @@ func (m JsonOpt[T]) MustValue() T {
}
return m.value
}
func (m JsonOpt[T]) IfSet(fn func(v T)) bool {
	if !m.isSet {
		return false
	}
	fn(m.value)
	return true
}
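`IfSet` complements `MustValue` for the "field was present in the payload" case; a sketch assuming `JsonOpt` lives in the `dataext` package like the neighbouring files (the `patchReq` struct is illustrative):
```go
package main

import (
	"encoding/json"
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/dataext" // assumed import path
)

type patchReq struct {
	Name dataext.JsonOpt[string] `json:"name"`
}

func main() {
	var req patchReq
	_ = json.Unmarshal([]byte(`{"name":"new-name"}`), &req)

	// the callback only runs when the field was present in the JSON payload
	updated := req.Name.IfSet(func(v string) {
		fmt.Println("patching name to", v)
	})
	fmt.Println(updated) // true
}
```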

98
dataext/ringBuffer.go Normal file
View File

@@ -0,0 +1,98 @@
package dataext

import "iter"

type RingBuffer[T any] struct {
	items    []T //
	capacity int // max number of items the buffer can hold
	size     int // how many items are in the buffer
	head     int // ptr to next item
}

func NewRingBuffer[T any](capacity int) *RingBuffer[T] {
	return &RingBuffer[T]{
		items:    make([]T, capacity),
		capacity: capacity,
		size:     0,
		head:     0,
	}
}

func (rb *RingBuffer[T]) Push(item T) {
	if rb.size < rb.capacity {
		rb.size++
	}
	rb.items[rb.head] = item
	rb.head = (rb.head + 1) % rb.capacity
}

func (rb *RingBuffer[T]) Peek() (T, bool) {
	if rb.size == 0 {
		return *new(T), false
	}
	return rb.items[(rb.head-1+rb.capacity)%rb.capacity], true
}

func (rb *RingBuffer[T]) Items() []T {
	if rb.size < rb.capacity {
		return rb.items[:rb.size]
	}
	return append(rb.items[rb.head:], rb.items[:rb.head]...)
}

func (rb *RingBuffer[T]) Size() int {
	return rb.size
}

func (rb *RingBuffer[T]) Capacity() int {
	return rb.capacity
}

func (rb *RingBuffer[T]) Clear() {
	rb.size = 0
	rb.head = 0
}

func (rb *RingBuffer[T]) IsFull() bool {
	return rb.size == rb.capacity
}

func (rb *RingBuffer[T]) At(i int) T {
	if i < 0 || i >= rb.size {
		panic("Index out of bounds")
	}
	if rb.size < rb.capacity {
		return rb.items[i]
	}
	return rb.items[(rb.head+i)%rb.capacity]
}

func (rb *RingBuffer[T]) Get(i int) (T, bool) {
	if i < 0 || i >= rb.size {
		return *new(T), false
	}
	if rb.size < rb.capacity {
		return rb.items[i], true
	}
	return rb.items[(rb.head+i)%rb.capacity], true
}

func (rb *RingBuffer[T]) Iter() iter.Seq[T] {
	return func(yield func(T) bool) {
		for i := 0; i < rb.size; i++ {
			if !yield(rb.At(i)) {
				return
			}
		}
	}
}

func (rb *RingBuffer[T]) Iter2() iter.Seq2[int, T] {
	return func(yield func(int, T) bool) {
		for i := 0; i < rb.size; i++ {
			if !yield(i, rb.At(i)) {
				return
			}
		}
	}
}
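A short usage sketch of the ring buffer above, including the `iter.Seq` based iteration (needs Go 1.23+ for `iter`; the import path is assumed):
```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/dataext" // assumed import path
)

func main() {
	rb := dataext.NewRingBuffer[int](3)

	for i := 1; i <= 5; i++ {
		rb.Push(i) // once full, the oldest entry is overwritten
	}

	fmt.Println(rb.Size(), rb.IsFull()) // 3 true
	fmt.Println(rb.Items())             // [3 4 5] - oldest to newest
	if v, ok := rb.Peek(); ok {
		fmt.Println(v) // 5 - most recently pushed element
	}

	for i, v := range rb.Iter2() {
		fmt.Println(i, v) // 0 3, then 1 4, then 2 5
	}
}
```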

View File

@@ -35,6 +35,23 @@ func (s *SyncMap[TKey, TData]) SetIfNotContains(key TKey, data TData) bool {
return true
}
func (s *SyncMap[TKey, TData]) SetIfNotContainsFunc(key TKey, data func() TData) bool {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TKey]TData)
}
if _, existsInPreState := s.data[key]; existsInPreState {
return false
}
s.data[key] = data()
return true
}
func (s *SyncMap[TKey, TData]) Get(key TKey) (TData, bool) {
s.lock.Lock()
defer s.lock.Unlock()
@@ -50,6 +67,39 @@ func (s *SyncMap[TKey, TData]) Get(key TKey) (TData, bool) {
}
}
func (s *SyncMap[TKey, TData]) GetAndSetIfNotContains(key TKey, data TData) TData {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TKey]TData)
}
if v, ok := s.data[key]; ok {
return v
} else {
s.data[key] = data
return data
}
}
func (s *SyncMap[TKey, TData]) GetAndSetIfNotContainsFunc(key TKey, data func() TData) TData {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TKey]TData)
}
if v, ok := s.data[key]; ok {
return v
} else {
dataObj := data()
s.data[key] = dataObj
return dataObj
}
}
func (s *SyncMap[TKey, TData]) Delete(key TKey) bool {
s.lock.Lock()
defer s.lock.Unlock()
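The new getters above give an atomic "get or lazily create" primitive; a sketch, assuming the zero value of `SyncMap` is usable as the nil-map checks above suggest (the cache use case is illustrative):
```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/dataext" // assumed import path
)

func main() {
	var cache dataext.SyncMap[string, []byte]

	// the factory only runs if the key is not present yet; concurrent callers
	// for the same key get the stored value instead of re-computing it
	val := cache.GetAndSetIfNotContainsFunc("report:2024", func() []byte {
		return []byte("expensive computation result")
	})
	fmt.Println(string(val))

	// subsequent calls return the cached value, the factory is not invoked again
	_ = cache.GetAndSetIfNotContains("report:2024", []byte("ignored"))
}
```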

View File

@@ -19,6 +19,14 @@ func (s Single[T1]) TupleValues() []any {
return []any{s.V1}
}
func NewSingle[T1 any](v1 T1) Single[T1] {
return Single[T1]{V1: v1}
}
func NewTuple1[T1 any](v1 T1) Single[T1] {
return Single[T1]{V1: v1}
}
// ----------------------------------------------------------------------------
type Tuple[T1 any, T2 any] struct {
@@ -34,6 +42,14 @@ func (t Tuple[T1, T2]) TupleValues() []any {
return []any{t.V1, t.V2}
}
func NewTuple[T1 any, T2 any](v1 T1, v2 T2) Tuple[T1, T2] {
return Tuple[T1, T2]{V1: v1, V2: v2}
}
func NewTuple2[T1 any, T2 any](v1 T1, v2 T2) Tuple[T1, T2] {
return Tuple[T1, T2]{V1: v1, V2: v2}
}
// ----------------------------------------------------------------------------
type Triple[T1 any, T2 any, T3 any] struct {
@@ -50,6 +66,14 @@ func (t Triple[T1, T2, T3]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3}
}
func NewTriple[T1 any, T2 any, T3 any](v1 T1, v2 T2, v3 T3) Triple[T1, T2, T3] {
return Triple[T1, T2, T3]{V1: v1, V2: v2, V3: v3}
}
func NewTuple3[T1 any, T2 any, T3 any](v1 T1, v2 T2, v3 T3) Triple[T1, T2, T3] {
return Triple[T1, T2, T3]{V1: v1, V2: v2, V3: v3}
}
// ----------------------------------------------------------------------------
type Quadruple[T1 any, T2 any, T3 any, T4 any] struct {
@@ -67,6 +91,14 @@ func (t Quadruple[T1, T2, T3, T4]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4}
}
func NewQuadruple[T1 any, T2 any, T3 any, T4 any](v1 T1, v2 T2, v3 T3, v4 T4) Quadruple[T1, T2, T3, T4] {
return Quadruple[T1, T2, T3, T4]{V1: v1, V2: v2, V3: v3, V4: v4}
}
func NewTuple4[T1 any, T2 any, T3 any, T4 any](v1 T1, v2 T2, v3 T3, v4 T4) Quadruple[T1, T2, T3, T4] {
return Quadruple[T1, T2, T3, T4]{V1: v1, V2: v2, V3: v3, V4: v4}
}
// ----------------------------------------------------------------------------
type Quintuple[T1 any, T2 any, T3 any, T4 any, T5 any] struct {
@@ -86,6 +118,14 @@ func (t Quintuple[T1, T2, T3, T4, T5]) TupleValues() []any {
}
func NewQuintuple[T1 any, T2 any, T3 any, T4 any, T5 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5) Quintuple[T1, T2, T3, T4, T5] {
return Quintuple[T1, T2, T3, T4, T5]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5}
}
func NewTuple5[T1 any, T2 any, T3 any, T4 any, T5 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5) Quintuple[T1, T2, T3, T4, T5] {
return Quintuple[T1, T2, T3, T4, T5]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5}
}
// ----------------------------------------------------------------------------
type Sextuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any] struct {
@@ -106,6 +146,14 @@ func (t Sextuple[T1, T2, T3, T4, T5, T6]) TupleValues() []any {
}
func NewSextuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6) Sextuple[T1, T2, T3, T4, T5, T6] {
return Sextuple[T1, T2, T3, T4, T5, T6]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6}
}
func NewTuple6[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6) Sextuple[T1, T2, T3, T4, T5, T6] {
return Sextuple[T1, T2, T3, T4, T5, T6]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6}
}
// ----------------------------------------------------------------------------
type Septuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any] struct {
@@ -126,6 +174,14 @@ func (t Septuple[T1, T2, T3, T4, T5, T6, T7]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7}
}
func NewSeptuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7) Septuple[T1, T2, T3, T4, T5, T6, T7] {
return Septuple[T1, T2, T3, T4, T5, T6, T7]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7}
}
func NewTuple7[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7) Septuple[T1, T2, T3, T4, T5, T6, T7] {
return Septuple[T1, T2, T3, T4, T5, T6, T7]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7}
}
// ----------------------------------------------------------------------------
type Octuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any] struct {
@@ -147,6 +203,14 @@ func (t Octuple[T1, T2, T3, T4, T5, T6, T7, T8]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8}
}
func NewOctuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8) Octuple[T1, T2, T3, T4, T5, T6, T7, T8] {
return Octuple[T1, T2, T3, T4, T5, T6, T7, T8]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8}
}
func NewTuple8[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8) Octuple[T1, T2, T3, T4, T5, T6, T7, T8] {
return Octuple[T1, T2, T3, T4, T5, T6, T7, T8]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8}
}
// ----------------------------------------------------------------------------
type Nonuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any] struct {
@@ -168,3 +232,10 @@ func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleLength() int {
func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8, t.V9}
}
func NewNonuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8, v9 T9) Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9] {
return Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8, V9: v9}
}
func NewTuple9[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8, v9 T9) Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9] {
return Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8, V9: v9}
}
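The `NewTupleN` aliases mirror the arity-named constructors; a tiny sketch, written as if inside the defining package since the extract does not show its name:
```go
package langext // assumption - the extract does not name the package that holds the tuple types

import "fmt"

func tupleExample() {
	p := NewTuple2("answer", 42) // identical to NewTuple("answer", 42)
	fmt.Println(p.V1, p.V2)      // answer 42
	fmt.Println(p.TupleValues()) // [answer 42]
}
```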

View File

@@ -30,6 +30,10 @@ import (
// If possible add metadata to the error (eg the id that was not found, ...), the methods are the same as in zerolog
// return nil, exerror.Wrap(err, "do something failed").Str("someid", id).Int("count", in.Count).Build()
//
// You can also add extra-data to an error with Extra(..)
// in contrast to metadata, extra-data is always printed in the resulting error and is more intended for additional (programmatically readable) data in addition to the errortype
// (metadata is more internal debug info/help)
//
// You can change the errortype with `.User()` and `.System()` (User-errors are 400 and System-errors 500)
// You can also manually set the statuscode with `.WithStatuscode(http.NotFound)`
// You can set the type with `WithType(..)`
@@ -55,23 +59,12 @@ import (
// => Wrap/New + Fatal
//
var stackSkipLogger zerolog.Logger
func init() {
cw := zerolog.ConsoleWriter{
Out: os.Stdout,
TimeFormat: "2006-01-02 15:04:05 Z07:00",
}
multi := zerolog.MultiLevelWriter(cw)
stackSkipLogger = zerolog.New(multi).With().Timestamp().CallerWithSkipFrameCount(4).Logger()
}
type Builder struct {
wrappedErr error
errorData *ExErr
containsGinData bool
noLog bool
wrappedErr error
errorData *ExErr
containsGinData bool
containsContextData bool
noLog bool
}
func Get(err error) *Builder {
@@ -87,12 +80,14 @@ func Wrap(err error, msg string) *Builder {
return &Builder{errorData: newExErr(CatSystem, TypeInternal, msg)} // prevent NPE if we call Wrap with err==nil
}
v := FromError(err)
if !pkgconfig.RecursiveErrors {
v := FromError(err)
v.Message = msg
return &Builder{wrappedErr: err, errorData: v}
} else {
return &Builder{wrappedErr: err, errorData: wrapExErr(v, msg, CatWrap, 1)}
}
return &Builder{wrappedErr: err, errorData: wrapExErr(FromError(err), msg, CatWrap, 1)}
}
// ----------------------------------------------------------------------------
@@ -309,27 +304,27 @@ func (b *Builder) Errs(key string, val []error) *Builder {
func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) *Builder {
if v := ctx.Value("start_timestamp"); v != nil {
if t, ok := v.(time.Time); ok {
b.Time("ctx.startTimestamp", t)
b.Time("ctx.endTimestamp", time.Now())
b.Time("ctx_startTimestamp", t)
b.Time("ctx_endTimestamp", time.Now())
}
}
b.Str("gin.method", req.Method)
b.Str("gin.path", g.FullPath())
b.Strs("gin.header", extractHeader(g.Request.Header))
b.Str("gin_method", req.Method)
b.Str("gin_path", g.FullPath())
b.Strs("gin_header", extractHeader(g.Request.Header))
if req.URL != nil {
b.Str("gin.url", req.URL.String())
b.Str("gin_url", req.URL.String())
}
if ctxVal := g.GetString("apiversion"); ctxVal != "" {
b.Str("gin.context.apiversion", ctxVal)
b.Str("gin_context_apiversion", ctxVal)
}
if ctxVal := g.GetString("uid"); ctxVal != "" {
b.Str("gin.context.uid", ctxVal)
b.Str("gin_context_uid", ctxVal)
}
if ctxVal := g.GetString("fcmId"); ctxVal != "" {
b.Str("gin.context.fcmid", ctxVal)
b.Str("gin_context_fcmid", ctxVal)
}
if ctxVal := g.GetString("reqid"); ctxVal != "" {
b.Str("gin.context.reqid", ctxVal)
b.Str("gin_context_reqid", ctxVal)
}
if req.Method != "GET" && req.Body != nil {
@@ -340,12 +335,12 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request)
var prettyJSON bytes.Buffer
err = json.Indent(&prettyJSON, bin, "", " ")
if err == nil {
b.Str("gin.body", string(prettyJSON.Bytes()))
b.Str("gin_body", string(prettyJSON.Bytes()))
} else {
b.Bytes("gin.body", bin)
b.Bytes("gin_body", bin)
}
} else {
b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
b.Str("gin_body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
}
}
}
@@ -355,9 +350,9 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request)
if brc, ok := req.Body.(dataext.BufferedReadCloser); ok {
if bin, err := brc.BufferedAll(); err == nil {
if len(bin) < 16*1024 {
b.Bytes("gin.body", bin)
b.Bytes("gin_body", bin)
} else {
b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
b.Str("gin_body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
}
}
}
@@ -365,31 +360,18 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request)
}
pkgconfig.ExtendGinMeta(ctx, b, g, req)
b.containsGinData = true
return b
}
func formatHeader(header map[string][]string) string {
ml := 1
for k, _ := range header {
if len(k) > ml {
ml = len(k)
}
}
r := ""
for k, v := range header {
if r != "" {
r += "\n"
}
for _, hval := range v {
value := hval
value = strings.ReplaceAll(value, "\n", "\\n")
value = strings.ReplaceAll(value, "\r", "\\r")
value = strings.ReplaceAll(value, "\t", "\\t")
r += langext.StrPadRight(k, " ", ml) + " := " + value
}
}
return r
func (b *Builder) CtxData(method Method, ctx context.Context) *Builder {
pkgconfig.ExtendContextMeta(b, method, ctx)
b.containsContextData = true
return b
}
func extractHeader(header map[string][]string) []string {
@@ -408,21 +390,35 @@ func extractHeader(header map[string][]string) []string {
// ----------------------------------------------------------------------------
// Extra adds additional data to the error
// this is not like the other metadata (like Id(), Str(), etc)
// this data is public and will be printed/outputted
func (b *Builder) Extra(key string, val any) *Builder {
b.errorData.Extra[key] = val
return b
}
// ----------------------------------------------------------------------------
// Build creates a new error, ready to pass up the stack
// If the errors is not SevWarn or SevInfo it gets also logged (in short form, without stacktrace) onto stdout
// Can be globally configured with ZeroLogErrTraces and ZeroLogAllTraces
// Can be locally suppressed with Builder.NoLog()
func (b *Builder) Build() error {
func (b *Builder) Build(ctxs ...context.Context) error {
warnOnPkgConfigNotInitialized()
for _, dctx := range ctxs {
b.CtxData(MethodBuild, dctx)
}
if pkgconfig.DisableErrorWrapping && b.wrappedErr != nil {
return b.wrappedErr
}
if pkgconfig.ZeroLogErrTraces && !b.noLog && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) {
b.errorData.ShortLog(stackSkipLogger.Error())
b.errorData.ShortLog(pkgconfig.ZeroLogger.Error())
} else if pkgconfig.ZeroLogAllTraces && !b.noLog {
b.errorData.ShortLog(stackSkipLogger.Error())
b.errorData.ShortLog(pkgconfig.ZeroLogger.Error())
}
b.errorData.CallListener(MethodBuild)
@@ -439,12 +435,14 @@ func (b *Builder) Output(ctx context.Context, g *gin.Context) {
b.GinReq(ctx, g, g.Request)
}
b.CtxData(MethodOutput, ctx)
b.errorData.Output(g)
if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal {
b.errorData.Log(stackSkipLogger.Error())
} else if b.errorData.Severity == SevWarn {
b.errorData.Log(stackSkipLogger.Warn())
if (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) && (pkgconfig.ZeroLogErrGinOutput || pkgconfig.ZeroLogAllGinOutput) {
b.errorData.Log(pkgconfig.ZeroLogger.Error())
} else if (b.errorData.Severity == SevWarn) && (pkgconfig.ZeroLogAllGinOutput) {
b.errorData.Log(pkgconfig.ZeroLogger.Warn())
}
b.errorData.CallListener(MethodOutput)
@@ -452,14 +450,26 @@ func (b *Builder) Output(ctx context.Context, g *gin.Context) {
// Print prints the error
// If the error is SevErr we also send it to the error-service
func (b *Builder) Print() {
func (b *Builder) Print(ctxs ...context.Context) Proxy {
warnOnPkgConfigNotInitialized()
for _, dctx := range ctxs {
b.CtxData(MethodPrint, dctx)
}
if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal {
b.errorData.Log(stackSkipLogger.Error())
b.errorData.Log(pkgconfig.ZeroLogger.Error())
} else if b.errorData.Severity == SevWarn {
b.errorData.ShortLog(stackSkipLogger.Warn())
b.errorData.ShortLog(pkgconfig.ZeroLogger.Warn())
} else if b.errorData.Severity == SevInfo {
b.errorData.ShortLog(pkgconfig.ZeroLogger.Info())
} else {
b.errorData.ShortLog(pkgconfig.ZeroLogger.Debug())
}
b.errorData.CallListener(MethodPrint)
return Proxy{v: *b.errorData} // we return Proxy<Exerr> here instead of Exerr to prevent warnings on ignored err-returns
}
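
A hedged usage sketch: because Print now returns a Proxy, the call can stay a single expression and the id of the logged error is still accessible; ctx and err are hypothetical, and the usual imports (context, fmt) are assumed.

func reportJobFailure(ctx context.Context, err error) {
	p := exerr.Wrap(err, "nightly cleanup failed").Print(ctx) // ctx is fed through CtxData(MethodPrint, ...)
	fmt.Println("reported error", p.UniqueID())
}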
func (b *Builder) Format(level LogPrintLevel) string {
@@ -468,9 +478,15 @@ func (b *Builder) Format(level LogPrintLevel) string {
// Fatal prints the error and terminates the program
// If the error is SevErr we also send it to the error-service
func (b *Builder) Fatal() {
func (b *Builder) Fatal(ctxs ...context.Context) {
b.errorData.Severity = SevFatal
b.errorData.Log(stackSkipLogger.WithLevel(zerolog.FatalLevel))
for _, dctx := range ctxs {
b.CtxData(MethodFatal, dctx)
}
b.errorData.Log(pkgconfig.ZeroLogger.WithLevel(zerolog.FatalLevel))
b.errorData.CallListener(MethodFatal)


@@ -12,11 +12,59 @@ import (
var reflectTypeStr = reflect.TypeOf("")
func FromError(err error) *ExErr {
//goland:noinspection GoTypeAssertionOnErrors
if verr, ok := err.(*ExErr); ok {
// A simple ExErr
return verr
}
//goland:noinspection GoTypeAssertionOnErrors
if verr, ok := err.(langext.PanicWrappedErr); ok {
return &ExErr{
UniqueID: newID(),
Category: CatForeign,
Type: TypePanic,
Severity: SevErr,
Timestamp: time.Time{},
StatusCode: nil,
Message: "A panic occurred",
WrappedErrType: fmt.Sprintf("%T", verr),
WrappedErr: err,
Caller: "",
OriginalError: nil,
Meta: MetaMap{
"panic_object": {DataType: MDTString, Value: fmt.Sprintf("%+v", verr.RecoveredObj())},
"panic_type": {DataType: MDTString, Value: fmt.Sprintf("%T", verr.RecoveredObj())},
"stack": {DataType: MDTString, Value: verr.Stack},
},
Extra: make(map[string]any),
}
}
//goland:noinspection GoTypeAssertionOnErrors
if verr, ok := err.(*langext.PanicWrappedErr); ok && verr != nil {
return &ExErr{
UniqueID: newID(),
Category: CatForeign,
Type: TypePanic,
Severity: SevErr,
Timestamp: time.Time{},
StatusCode: nil,
Message: "A panic occurred",
WrappedErrType: fmt.Sprintf("%T", verr),
WrappedErr: err,
Caller: "",
OriginalError: nil,
Meta: MetaMap{
"panic_object": {DataType: MDTString, Value: fmt.Sprintf("%+v", verr.RecoveredObj())},
"panic_type": {DataType: MDTString, Value: fmt.Sprintf("%T", verr.RecoveredObj())},
"stack": {DataType: MDTString, Value: verr.Stack},
},
Extra: make(map[string]any),
}
}
// A foreign error (eg a MongoDB exception)
return &ExErr{
UniqueID: newID(),
@@ -31,6 +79,7 @@ func FromError(err error) *ExErr {
Caller: "",
OriginalError: nil,
Meta: getForeignMeta(err),
Extra: make(map[string]any),
}
}
@@ -48,6 +97,7 @@ func newExErr(cat ErrorCategory, errtype ErrorType, msg string) *ExErr {
Caller: callername(2),
OriginalError: nil,
Meta: make(map[string]MetaValue),
Extra: make(map[string]any),
}
}
@@ -56,7 +106,7 @@ func wrapExErr(e *ExErr, msg string, cat ErrorCategory, stacktraceskip int) *ExE
UniqueID: newID(),
Category: cat,
Type: TypeWrap,
Severity: SevErr,
Severity: e.Severity,
Timestamp: time.Now(),
StatusCode: e.StatusCode,
Message: msg,
@@ -65,6 +115,7 @@ func wrapExErr(e *ExErr, msg string, cat ErrorCategory, stacktraceskip int) *ExE
Caller: callername(1 + stacktraceskip),
OriginalError: e,
Meta: make(map[string]MetaValue),
Extra: langext.CopyMap(langext.ForceMap(e.Extra)),
}
}
@@ -181,7 +232,7 @@ func getReflectedMetaValues(value interface{}, remainingDepth int) map[string]Me
jsonval, err := json.Marshal(value)
if err != nil {
panic(err) // gets recovered later up
return map[string]MetaValue{"": {DataType: MDTString, Value: fmt.Sprintf("Failed to Marshal %T:\n%+v", value, value)}}
}
return map[string]MetaValue{"": {DataType: MDTString, Value: string(jsonval)}}


@@ -1,91 +1,14 @@
package exerr
import (
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
type Method string
const (
MethodOutput Method = "OUTPUT"
MethodPrint Method = "PRINT"
MethodBuild Method = "BUILD"
MethodFatal Method = "FATAL"
)
type ErrorCategory struct{ Category string }
var (
CatWrap = ErrorCategory{"Wrap"} // The error is simply wrapping another error (e.g. when a grpc call returns an error)
CatSystem = ErrorCategory{"System"} // An internal system error (e.g. connection to db failed)
CatUser = ErrorCategory{"User"} // The user (the API caller) did something wrong (e.g. he has no permissions to do this)
CatForeign = ErrorCategory{"Foreign"} // A foreign error that some component threw (e.g. an unknown mongodb error), happens if we call Wrap(..) on an non-bmerror value
)
//goland:noinspection GoUnusedGlobalVariable
var AllCategories = []ErrorCategory{CatWrap, CatSystem, CatUser, CatForeign}
type ErrorSeverity struct{ Severity string }
var (
SevTrace = ErrorSeverity{"Trace"}
SevDebug = ErrorSeverity{"Debug"}
SevInfo = ErrorSeverity{"Info"}
SevWarn = ErrorSeverity{"Warn"}
SevErr = ErrorSeverity{"Err"}
SevFatal = ErrorSeverity{"Fatal"}
)
//goland:noinspection GoUnusedGlobalVariable
var AllSeverities = []ErrorSeverity{SevTrace, SevDebug, SevInfo, SevWarn, SevErr, SevFatal}
type ErrorType struct {
Key string
DefaultStatusCode *int
}
//goland:noinspection GoUnusedGlobalVariable
var (
TypeInternal = NewType("INTERNAL_ERROR", langext.Ptr(500))
TypePanic = NewType("PANIC", langext.Ptr(500))
TypeNotImplemented = NewType("NOT_IMPLEMENTED", langext.Ptr(500))
TypeMongoQuery = NewType("MONGO_QUERY", langext.Ptr(500))
TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500))
TypeMongoFilter = NewType("MONGO_FILTER", langext.Ptr(500))
TypeMongoReflection = NewType("MONGO_REFLECTION", langext.Ptr(500))
TypeMongoInvalidOpt = NewType("MONGO_INVALIDOPT", langext.Ptr(500))
TypeSQLQuery = NewType("SQL_QUERY", langext.Ptr(500))
TypeSQLBuild = NewType("SQL_BUILD", langext.Ptr(500))
TypeSQLDecode = NewType("SQL_DECODE", langext.Ptr(500))
TypeWrap = NewType("Wrap", nil)
TypeBindFailURI = NewType("BINDFAIL_URI", langext.Ptr(400))
TypeBindFailQuery = NewType("BINDFAIL_QUERY", langext.Ptr(400))
TypeBindFailJSON = NewType("BINDFAIL_JSON", langext.Ptr(400))
TypeBindFailFormData = NewType("BINDFAIL_FORMDATA", langext.Ptr(400))
TypeBindFailHeader = NewType("BINDFAIL_HEADER", langext.Ptr(400))
TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400))
TypeInvalidCSID = NewType("INVALID_CSID", langext.Ptr(400))
TypeGoogleStatuscode = NewType("GOOGLE_STATUSCODE", langext.Ptr(400))
TypeGoogleResponse = NewType("GOOGLE_RESPONSE", langext.Ptr(400))
TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401))
TypeAuthFailed = NewType("AUTH_FAILED", langext.Ptr(401))
// other values come from the downstream application that uses goext
)
var registeredTypes = dataext.SyncMap[string, ErrorType]{}
func NewType(key string, defStatusCode *int) ErrorType {
et := ErrorType{key, defStatusCode}
registeredTypes.Set(key, et)
return et
}
func ListRegisteredTypes() []ErrorType {
return registeredTypes.GetAllValues()
}
type LogPrintLevel string
const (

exerr/dataCategory.go Normal file

@@ -0,0 +1,89 @@
package exerr
import (
"encoding/json"
"errors"
"fmt"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"reflect"
)
type ErrorCategory struct{ Category string }
var (
CatWrap = ErrorCategory{"Wrap"} // The error is simply wrapping another error (e.g. when a grpc call returns an error)
CatSystem = ErrorCategory{"System"} // An internal system error (e.g. connection to db failed)
CatUser = ErrorCategory{"User"} // The user (the API caller) did something wrong (e.g. he has no permissions to do this)
CatForeign = ErrorCategory{"Foreign"} // A foreign error that some component threw (e.g. an unknown mongodb error), happens if we call Wrap(..) on an non-bmerror value
)
func (e *ErrorCategory) UnmarshalJSON(bytes []byte) error {
return json.Unmarshal(bytes, &e.Category)
}
func (e ErrorCategory) MarshalJSON() ([]byte, error) {
return json.Marshal(e.Category)
}
func (e *ErrorCategory) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bson.TypeNull {
// we can't set nil in UnmarshalBSONValue (so we use default(struct))
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values
// https://stackoverflow.com/questions/75167597
// https://jira.mongodb.org/browse/GODRIVER-2252
*e = ErrorCategory{}
return nil
}
if bt != bson.TypeString {
return fmt.Errorf("cannot unmarshal %v into String", bt)
}
var tt string
err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt)
if err != nil {
return err
}
*e = ErrorCategory{tt}
return nil
}
func (e ErrorCategory) MarshalBSONValue() (bsontype.Type, []byte, error) {
return bson.MarshalValue(e.Category)
}
func (e ErrorCategory) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
if val.Kind() == reflect.Ptr && val.IsNil() {
if !val.CanSet() {
return errors.New("ValueUnmarshalerDecodeValue")
}
val.Set(reflect.New(val.Type().Elem()))
}
tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr && len(src) == 0 {
val.Set(reflect.Zero(val.Type()))
return nil
}
err = e.UnmarshalBSONValue(tp, src)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr {
val.Set(reflect.ValueOf(&e))
} else {
val.Set(reflect.ValueOf(e))
}
return nil
}
//goland:noinspection GoUnusedGlobalVariable
var AllCategories = []ErrorCategory{CatWrap, CatSystem, CatUser, CatForeign}

exerr/dataSeverity.go Normal file

@@ -0,0 +1,91 @@
package exerr
import (
"encoding/json"
"errors"
"fmt"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"reflect"
)
type ErrorSeverity struct{ Severity string }
var (
SevTrace = ErrorSeverity{"Trace"}
SevDebug = ErrorSeverity{"Debug"}
SevInfo = ErrorSeverity{"Info"}
SevWarn = ErrorSeverity{"Warn"}
SevErr = ErrorSeverity{"Err"}
SevFatal = ErrorSeverity{"Fatal"}
)
func (e *ErrorSeverity) UnmarshalJSON(bytes []byte) error {
return json.Unmarshal(bytes, &e.Severity)
}
func (e ErrorSeverity) MarshalJSON() ([]byte, error) {
return json.Marshal(e.Severity)
}
func (e *ErrorSeverity) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bson.TypeNull {
// we can't set nil in UnmarshalBSONValue (so we use default(struct))
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values
// https://stackoverflow.com/questions/75167597
// https://jira.mongodb.org/browse/GODRIVER-2252
*e = ErrorSeverity{}
return nil
}
if bt != bson.TypeString {
return fmt.Errorf("cannot unmarshal %v into String", bt)
}
var tt string
err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt)
if err != nil {
return err
}
*e = ErrorSeverity{tt}
return nil
}
func (e ErrorSeverity) MarshalBSONValue() (bsontype.Type, []byte, error) {
return bson.MarshalValue(e.Severity)
}
func (e ErrorSeverity) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
if val.Kind() == reflect.Ptr && val.IsNil() {
if !val.CanSet() {
return errors.New("ValueUnmarshalerDecodeValue")
}
val.Set(reflect.New(val.Type().Elem()))
}
tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr && len(src) == 0 {
val.Set(reflect.Zero(val.Type()))
return nil
}
err = e.UnmarshalBSONValue(tp, src)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr {
val.Set(reflect.ValueOf(&e))
} else {
val.Set(reflect.ValueOf(e))
}
return nil
}
//goland:noinspection GoUnusedGlobalVariable
var AllSeverities = []ErrorSeverity{SevTrace, SevDebug, SevInfo, SevWarn, SevErr, SevFatal}

exerr/dataType.go Normal file

@@ -0,0 +1,156 @@
package exerr
import (
"encoding/json"
"errors"
"fmt"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
)
type ErrorType struct {
Key string
DefaultStatusCode *int
}
//goland:noinspection GoUnusedGlobalVariable
var (
TypeInternal = NewType("INTERNAL_ERROR", langext.Ptr(500))
TypePanic = NewType("PANIC", langext.Ptr(500))
TypeNotImplemented = NewType("NOT_IMPLEMENTED", langext.Ptr(500))
TypeAssert = NewType("ASSERT", langext.Ptr(500))
TypeMongoQuery = NewType("MONGO_QUERY", langext.Ptr(500))
TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500))
TypeMongoFilter = NewType("MONGO_FILTER", langext.Ptr(500))
TypeMongoReflection = NewType("MONGO_REFLECTION", langext.Ptr(500))
TypeMongoInvalidOpt = NewType("MONGO_INVALIDOPT", langext.Ptr(500))
TypeSQLQuery = NewType("SQL_QUERY", langext.Ptr(500))
TypeSQLBuild = NewType("SQL_BUILD", langext.Ptr(500))
TypeSQLDecode = NewType("SQL_DECODE", langext.Ptr(500))
TypeWrap = NewType("Wrap", nil)
TypeBindFailURI = NewType("BINDFAIL_URI", langext.Ptr(400))
TypeBindFailQuery = NewType("BINDFAIL_QUERY", langext.Ptr(400))
TypeBindFailJSON = NewType("BINDFAIL_JSON", langext.Ptr(400))
TypeBindFailFormData = NewType("BINDFAIL_FORMDATA", langext.Ptr(400))
TypeBindFailHeader = NewType("BINDFAIL_HEADER", langext.Ptr(400))
TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400))
TypeInvalidCSID = NewType("INVALID_CSID", langext.Ptr(400))
TypeGoogleStatuscode = NewType("GOOGLE_STATUSCODE", langext.Ptr(400))
TypeGoogleResponse = NewType("GOOGLE_RESPONSE", langext.Ptr(400))
TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401))
TypeAuthFailed = NewType("AUTH_FAILED", langext.Ptr(401))
TypeInvalidImage = NewType("IMAGEEXT_INVALID_IMAGE", langext.Ptr(400))
TypeInvalidMimeType = NewType("IMAGEEXT_INVALID_MIMETYPE", langext.Ptr(400))
// other values come from the downstream application that uses goext
)
func (e *ErrorType) UnmarshalJSON(bytes []byte) error {
var k string
err := json.Unmarshal(bytes, &k)
if err != nil {
return err
}
if d, ok := registeredTypes.Get(k); ok {
*e = d
return nil
} else {
*e = ErrorType{k, nil}
return nil
}
}
func (e ErrorType) MarshalJSON() ([]byte, error) {
return json.Marshal(e.Key)
}
func (e *ErrorType) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bson.TypeNull {
// we can't set nil in UnmarshalBSONValue (so we use default(struct))
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values
// https://stackoverflow.com/questions/75167597
// https://jira.mongodb.org/browse/GODRIVER-2252
*e = ErrorType{}
return nil
}
if bt != bson.TypeString {
return fmt.Errorf("cannot unmarshal %v into String", bt)
}
var tt string
err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt)
if err != nil {
return err
}
if d, ok := registeredTypes.Get(tt); ok {
*e = d
return nil
} else {
*e = ErrorType{tt, nil}
return nil
}
}
func (e ErrorType) MarshalBSONValue() (bsontype.Type, []byte, error) {
return bson.MarshalValue(e.Key)
}
func (e ErrorType) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
if val.Kind() == reflect.Ptr && val.IsNil() {
if !val.CanSet() {
return errors.New("ValueUnmarshalerDecodeValue")
}
val.Set(reflect.New(val.Type().Elem()))
}
tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr && len(src) == 0 {
val.Set(reflect.Zero(val.Type()))
return nil
}
err = e.UnmarshalBSONValue(tp, src)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr {
val.Set(reflect.ValueOf(&e))
} else {
val.Set(reflect.ValueOf(e))
}
return nil
}
var registeredTypes = dataext.SyncMap[string, ErrorType]{}
func NewType(key string, defStatusCode *int) ErrorType {
et := ErrorType{key, defStatusCode}
registeredTypes.Set(key, et)
return et
}
func ListRegisteredTypes() []ErrorType {
return registeredTypes.GetAllValues()
}
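
A short sketch (illustrative names, not part of the changeset) of registering an application-specific type; NewType stores the key in registeredTypes, so the JSON/BSON unmarshalling above resolves it back to the same ErrorType including its default status code.

var TypeQuotaExceeded = exerr.NewType("QUOTA_EXCEEDED", langext.Ptr(429))

func checkQuota(used int, limit int) error {
	if used >= limit {
		return exerr.New(TypeQuotaExceeded, "monthly quota exceeded").Build()
	}
	return nil
}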

exerr/data_test.go Normal file

@@ -0,0 +1,153 @@
package exerr
import (
"context"
"encoding/json"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/primitive"
"go.mongodb.org/mongo-driver/mongo"
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"testing"
"time"
)
func TestJSONMarshalErrorCategory(t *testing.T) {
c1 := CatSystem
jsonbin := tst.Must(json.Marshal(c1))(t)
var c2 ErrorCategory
tst.AssertNoErr(t, json.Unmarshal(jsonbin, &c2))
tst.AssertEqual(t, c1, c2)
tst.AssertEqual(t, string(jsonbin), "\"System\"")
}
func TestJSONMarshalErrorSeverity(t *testing.T) {
c1 := SevErr
jsonbin := tst.Must(json.Marshal(c1))(t)
var c2 ErrorSeverity
tst.AssertNoErr(t, json.Unmarshal(jsonbin, &c2))
tst.AssertEqual(t, c1, c2)
tst.AssertEqual(t, string(jsonbin), "\"Err\"")
}
func TestJSONMarshalErrorType(t *testing.T) {
c1 := TypeNotImplemented
jsonbin := tst.Must(json.Marshal(c1))(t)
var c2 ErrorType
tst.AssertNoErr(t, json.Unmarshal(jsonbin, &c2))
tst.AssertEqual(t, c1, c2)
tst.AssertEqual(t, string(jsonbin), "\"NOT_IMPLEMENTED\"")
}
func TestBSONMarshalErrorCategory(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), 350*time.Millisecond)
defer cancel()
client, err := mongo.Connect(ctx)
if err != nil {
t.Skip("Skip test - no local mongo found")
return
}
err = client.Ping(ctx, nil)
if err != nil {
t.Skip("Skip test - no local mongo found")
return
}
primimd := primitive.NewObjectID()
_, err = client.Database("_test").Collection("goext-cicd").InsertOne(ctx, bson.M{"_id": primimd, "val": CatSystem})
tst.AssertNoErr(t, err)
cursor := client.Database("_test").Collection("goext-cicd").FindOne(ctx, bson.M{"_id": primimd, "val": bson.M{"$type": "string"}})
var c1 struct {
ID primitive.ObjectID `bson:"_id"`
Val ErrorCategory `bson:"val"`
}
err = cursor.Decode(&c1)
tst.AssertNoErr(t, err)
tst.AssertEqual(t, c1.Val, CatSystem)
}
func TestBSONMarshalErrorSeverity(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), 350*time.Millisecond)
defer cancel()
client, err := mongo.Connect(ctx)
if err != nil {
t.Skip("Skip test - no local mongo found")
return
}
err = client.Ping(ctx, nil)
if err != nil {
t.Skip("Skip test - no local mongo found")
return
}
primimd := primitive.NewObjectID()
_, err = client.Database("_test").Collection("goext-cicd").InsertOne(ctx, bson.M{"_id": primimd, "val": SevErr})
tst.AssertNoErr(t, err)
cursor := client.Database("_test").Collection("goext-cicd").FindOne(ctx, bson.M{"_id": primimd, "val": bson.M{"$type": "string"}})
var c1 struct {
ID primitive.ObjectID `bson:"_id"`
Val ErrorSeverity `bson:"val"`
}
err = cursor.Decode(&c1)
tst.AssertNoErr(t, err)
tst.AssertEqual(t, c1.Val, SevErr)
}
func TestBSONMarshalErrorType(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), 350*time.Millisecond)
defer cancel()
client, err := mongo.Connect(ctx)
if err != nil {
t.Skip("Skip test - no local mongo found")
return
}
err = client.Ping(ctx, nil)
if err != nil {
t.Skip("Skip test - no local mongo found")
return
}
primimd := primitive.NewObjectID()
_, err = client.Database("_test").Collection("goext-cicd").InsertOne(ctx, bson.M{"_id": primimd, "val": TypeNotImplemented})
tst.AssertNoErr(t, err)
cursor := client.Database("_test").Collection("goext-cicd").FindOne(ctx, bson.M{"_id": primimd, "val": bson.M{"$type": "string"}})
var c1 struct {
ID primitive.ObjectID `bson:"_id"`
Val ErrorType `bson:"val"`
}
err = cursor.Decode(&c1)
tst.AssertNoErr(t, err)
tst.AssertEqual(t, c1.Val, TypeNotImplemented)
}


@@ -1,19 +1,29 @@
package exerr
import (
"context"
"fmt"
"github.com/gin-gonic/gin"
"github.com/rs/zerolog"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"net/http"
"os"
)
type ErrorPackageConfig struct {
ZeroLogErrTraces bool // autom print zerolog logs on .Build() (for SevErr and SevFatal)
ZeroLogAllTraces bool // autom print zerolog logs on .Build() (for all Severities)
RecursiveErrors bool // errors contains their Origin-Error
ExtendedGinOutput bool // Log extended data (trace, meta, ...) to gin in err.Output()
IncludeMetaInGinOutput bool // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output()
ExtendGinOutput func(err *ExErr, json map[string]any) // (Optionally) extend the gin output with more fields
ExtendGinDataOutput func(err *ExErr, depth int, json map[string]any) // (Optionally) extend the gin `__data` output with more fields
DisableErrorWrapping bool // Disables the exerr.Wrap()...Build() function - will always return the original error
ZeroLogErrTraces bool // autom print zerolog logs on .Build() (for SevErr and SevFatal)
ZeroLogAllTraces bool // autom print zerolog logs on .Build() (for all Severities)
RecursiveErrors bool // errors contains their Origin-Error
ExtendedGinOutput bool // Log extended data (trace, meta, ...) to gin in err.Output()
IncludeMetaInGinOutput bool // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output()
ExtendGinOutput func(err *ExErr, json map[string]any) // (Optionally) extend the gin output with more fields
ExtendGinDataOutput func(err *ExErr, depth int, json map[string]any) // (Optionally) extend the gin `__data` output with more fields
DisableErrorWrapping bool // Disables the exerr.Wrap()...Build() function - will always return the original error
ZeroLogErrGinOutput bool // autom print zerolog logs on ginext.Error() / .Output(gin) (for SevErr and SevFatal)
ZeroLogAllGinOutput bool // autom print zerolog logs on ginext.Error() / .Output(gin) (for all Severities)
ExtendGinMeta func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) // (Optionally) extend the final error meta values with additional data from the gin context (a few are automatically added, here more can be included)
ExtendContextMeta func(b *Builder, method Method, dctx context.Context) // (Optionally) extend the final error meta values with additional data from the context (a few are automatically added, here more can be included)
ZeroLogger zerolog.Logger // The logger used to print exerr log messages
}
type ErrorPackageConfigInit struct {
@@ -25,6 +35,11 @@ type ErrorPackageConfigInit struct {
ExtendGinOutput func(err *ExErr, json map[string]any)
ExtendGinDataOutput func(err *ExErr, depth int, json map[string]any)
DisableErrorWrapping *bool
ZeroLogErrGinOutput *bool
ZeroLogAllGinOutput *bool
ExtendGinMeta func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request)
ExtendContextMeta func(b *Builder, method Method, dctx context.Context)
ZeroLogger *zerolog.Logger
}
var initialized = false
@@ -38,6 +53,10 @@ var pkgconfig = ErrorPackageConfig{
ExtendGinOutput: func(err *ExErr, json map[string]any) {},
ExtendGinDataOutput: func(err *ExErr, depth int, json map[string]any) {},
DisableErrorWrapping: false,
ZeroLogErrGinOutput: true,
ZeroLogAllGinOutput: false,
ExtendGinMeta: func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) {},
ExtendContextMeta: func(b *Builder, method Method, dctx context.Context) {},
}
// Init initializes the exerr packages
@@ -50,6 +69,8 @@ func Init(cfg ErrorPackageConfigInit) {
ego := func(err *ExErr, json map[string]any) {}
egdo := func(err *ExErr, depth int, json map[string]any) {}
egm := func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) {}
egcm := func(b *Builder, method Method, dctx context.Context) {}
if cfg.ExtendGinOutput != nil {
ego = cfg.ExtendGinOutput
@@ -57,6 +78,19 @@ func Init(cfg ErrorPackageConfigInit) {
if cfg.ExtendGinDataOutput != nil {
egdo = cfg.ExtendGinDataOutput
}
if cfg.ExtendGinMeta != nil {
egm = cfg.ExtendGinMeta
}
if cfg.ExtendContextMeta != nil {
egcm = cfg.ExtendContextMeta
}
var logger zerolog.Logger
if cfg.ZeroLogger != nil {
logger = *cfg.ZeroLogger
} else {
logger = newDefaultLogger()
}
pkgconfig = ErrorPackageConfig{
ZeroLogErrTraces: langext.Coalesce(cfg.ZeroLogErrTraces, pkgconfig.ZeroLogErrTraces),
@@ -67,11 +101,27 @@ func Init(cfg ErrorPackageConfigInit) {
ExtendGinOutput: ego,
ExtendGinDataOutput: egdo,
DisableErrorWrapping: langext.Coalesce(cfg.DisableErrorWrapping, pkgconfig.DisableErrorWrapping),
ZeroLogAllGinOutput: langext.Coalesce(cfg.ZeroLogAllGinOutput, pkgconfig.ZeroLogAllGinOutput),
ZeroLogErrGinOutput: langext.Coalesce(cfg.ZeroLogErrGinOutput, pkgconfig.ZeroLogErrGinOutput),
ExtendGinMeta: egm,
ExtendContextMeta: egcm,
ZeroLogger: logger,
}
initialized = true
}
func newDefaultLogger() zerolog.Logger {
cw := zerolog.ConsoleWriter{
Out: os.Stdout,
TimeFormat: "2006-01-02 15:04:05 Z07:00",
}
multi := zerolog.MultiLevelWriter(cw)
return zerolog.New(multi).With().Timestamp().CallerWithSkipFrameCount(4).Logger()
}
func Initialized() bool {
return initialized
}
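
A hedged initialization sketch wiring the new options, with the usual imports (context, os, zerolog) assumed; langext.PTrue is assumed to exist analogous to langext.PFalse, and the "requestID" context key is purely illustrative.

func setupErrors() {
	logger := zerolog.New(os.Stdout).With().Timestamp().Logger()

	exerr.Init(exerr.ErrorPackageConfigInit{
		ZeroLogErrTraces:    langext.PTrue,
		ZeroLogErrGinOutput: langext.PTrue,
		ZeroLogger:          &logger,
		ExtendContextMeta: func(b *exerr.Builder, method exerr.Method, dctx context.Context) {
			// pull request-scoped values into every error built with this context
			if reqID, ok := dctx.Value("requestID").(string); ok {
				b.Str("requestID", reqID)
			}
		},
	})
}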


@@ -1,6 +1,7 @@
package exerr
import (
"fmt"
"github.com/rs/xid"
"github.com/rs/zerolog"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
@@ -26,7 +27,8 @@ type ExErr struct {
OriginalError *ExErr `json:"originalError"`
Meta MetaMap `json:"meta"`
Extra map[string]any `json:"extra"`
Meta MetaMap `json:"meta"`
}
func (ee *ExErr) Error() string {
@@ -36,6 +38,13 @@ func (ee *ExErr) Error() string {
// Unwrap must be implemented so that some error.XXX methods work
func (ee *ExErr) Unwrap() error {
if ee.OriginalError == nil {
if ee.WrappedErr != nil {
if werr, ok := ee.WrappedErr.(error); ok {
return werr
}
}
return nil // this is necessary - otherwise we return a wrapped nil and the `x == nil` comparison fails (= panic in errors.Is and other failures)
}
return ee.OriginalError
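
A hedged sketch of what the WrappedErr fallback enables: sentinel checks through the chain. Whether the comparison matches depends on the foreign error actually being stored in WrappedErr when it is converted (this section only shows that for the panic branches of FromError).

func demoSentinelCheck() bool {
	wrapped := exerr.Wrap(io.ErrUnexpectedEOF, "read failed").Build()
	// true if the lowest ExErr in the chain keeps io.ErrUnexpectedEOF in WrappedErr
	return errors.Is(wrapped, io.ErrUnexpectedEOF)
}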
@@ -81,9 +90,29 @@ func (ee *ExErr) Log(evt *zerolog.Event) {
}
func (ee *ExErr) FormatLog(lvl LogPrintLevel) string {
// [LogPrintShort]
//
// - Only print message and type
// - Used e.g. for logging to the console when Build is called
// - also used in Print() if level == Warn/Info
//
// [LogPrintOverview]
//
// - print message, extra and errortrace
//
// [LogPrintFull]
//
// - print full error, with meta and extra, and trace, etc
// - Used in Output() and Print()
//
if lvl == LogPrintShort {
msg := ee.Message
if msg == "" {
msg = ee.RecursiveMessage()
}
if ee.OriginalError != nil && ee.OriginalError.Category == CatForeign {
msg = msg + " (" + strings.ReplaceAll(ee.OriginalError.Message, "\n", " ") + ")"
}
@@ -98,6 +127,10 @@ func (ee *ExErr) FormatLog(lvl LogPrintLevel) string {
str := "[" + ee.RecursiveType().Key + "] <" + ee.UniqueID + "> " + strings.ReplaceAll(ee.RecursiveMessage(), "\n", " ") + "\n"
for exk, exv := range ee.Extra {
str += fmt.Sprintf(" # [[[ %s ==> %v ]]]\n", exk, exv)
}
indent := ""
for curr := ee; curr != nil; curr = curr.OriginalError {
indent += " "
@@ -119,12 +152,16 @@ func (ee *ExErr) FormatLog(lvl LogPrintLevel) string {
str := "[" + ee.RecursiveType().Key + "] <" + ee.UniqueID + "> " + strings.ReplaceAll(ee.RecursiveMessage(), "\n", " ") + "\n"
for exk, exv := range ee.Extra {
str += fmt.Sprintf(" # [[[ %s ==> %v ]]]\n", exk, exv)
}
indent := ""
for curr := ee; curr != nil; curr = curr.OriginalError {
indent += " "
etype := ee.Type.Key
if ee.Type == TypeWrap {
etype := curr.Type.Key
if curr.Type == TypeWrap {
etype = "~"
}
@@ -325,6 +362,22 @@ func (ee *ExErr) GetMetaTime(key string) (time.Time, bool) {
return time.Time{}, false
}
func (ee *ExErr) GetExtra(key string) (any, bool) {
if v, ok := ee.Extra[key]; ok {
return v, true
}
return nil, false
}
func (ee *ExErr) UniqueIDs() []string {
ids := []string{ee.UniqueID}
for curr := ee; curr != nil; curr = curr.OriginalError {
ids = append(ids, curr.UniqueID)
}
return ids
}
// contains test if the supplied error is contained in this error (anywhere in the chain)
func (ee *ExErr) contains(original *ExErr) (*ExErr, bool) {
if original == nil {


@@ -2,10 +2,19 @@ package exerr
import (
"errors"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"os"
"testing"
)
func TestMain(m *testing.M) {
if !Initialized() {
Init(ErrorPackageConfigInit{ZeroLogErrTraces: langext.PFalse, ZeroLogAllTraces: langext.PFalse})
}
os.Exit(m.Run())
}
type golangErr struct {
Message string
}


@@ -15,10 +15,10 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la
ginJson["id"] = ee.UniqueID
}
if ee.Category != CatWrap {
ginJson["category"] = ee.Category
ginJson["category"] = ee.Category.Category
}
if ee.Type != TypeWrap {
ginJson["type"] = ee.Type
ginJson["type"] = ee.Type.Key
}
if ee.StatusCode != nil {
ginJson["statuscode"] = ee.StatusCode
@@ -30,7 +30,7 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la
ginJson["caller"] = ee.Caller
}
if ee.Severity != SevErr {
ginJson["severity"] = ee.Severity
ginJson["severity"] = ee.Severity.Severity
}
if ee.Timestamp != (time.Time{}) {
ginJson["time"] = ee.Timestamp.Format(time.RFC3339)
@@ -48,6 +48,12 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la
metaJson[metaKey] = metaVal.rawValueForJson()
}
ginJson["meta"] = metaJson
extraJson := langext.H{}
for extraKey, extraVal := range ee.Extra {
extraJson[extraKey] = extraVal
}
ginJson["extra"] = extraJson
}
if applyExtendListener {
@@ -90,6 +96,20 @@ func (ee *ExErr) ToAPIJson(applyExtendListener bool, includeWrappedErrors bool,
apiOutput["__data"] = ee.toJson(0, applyExtendListener, includeMetaFields)
}
for exkey, exval := range ee.Extra {
// ensure we do not override existing values
for {
if _, ok := apiOutput[exkey]; ok {
exkey = "_" + exkey
} else {
break
}
}
apiOutput[exkey] = exval
}
if applyExtendListener {
pkgconfig.ExtendGinOutput(ee, apiOutput)
}


@@ -86,3 +86,41 @@ func MessageMatch(e error, matcher func(string) bool) bool {
return false
}
// OriginalError returns the lowest level error, probably the original/external error that was originally wrapped
func OriginalError(e error) error {
if e == nil {
return nil
}
//goland:noinspection GoTypeAssertionOnErrors
bmerr, ok := e.(*ExErr)
if !ok {
return e
}
for bmerr.OriginalError != nil {
bmerr = bmerr.OriginalError
}
if bmerr.WrappedErr != nil {
if werr, ok := bmerr.WrappedErr.(error); ok {
return werr
}
}
return bmerr
}
func UniqueID(v error) *string {
if v == nil {
return nil
}
//goland:noinspection GoTypeAssertionOnErrors
if verr, ok := v.(*ExErr); ok {
return &verr.UniqueID
}
return nil
}
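
A small sketch combining the two helpers; the nil guard and the fallback to Error() are the caller's responsibility.

func describe(err error) string {
	if err == nil {
		return ""
	}
	orig := exerr.OriginalError(err)          // lowest-level / originally wrapped error
	if id := exerr.UniqueID(err); id != nil { // non-nil only for *ExErr values
		return *id + ": " + orig.Error()
	}
	return orig.Error()
}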


@@ -4,15 +4,6 @@ import (
"sync"
)
type Method string
const (
MethodOutput Method = "OUTPUT"
MethodPrint Method = "PRINT"
MethodBuild Method = "BUILD"
MethodFatal Method = "FATAL"
)
type Listener = func(method Method, v *ExErr)
var listenerLock = sync.Mutex{}

exerr/proxy.go Normal file

@@ -0,0 +1,13 @@
package exerr
type Proxy struct {
v ExErr
}
func (p *Proxy) UniqueID() string {
return p.v.UniqueID
}
func (p *Proxy) Get() ExErr {
return p.v
}


@@ -3,13 +3,17 @@ package ginext
import (
"github.com/gin-gonic/gin"
"net/http"
"strings"
)
func CorsMiddleware() gin.HandlerFunc {
func CorsMiddleware(allowheader []string, exposeheader []string) gin.HandlerFunc {
return func(c *gin.Context) {
c.Writer.Header().Set("Access-Control-Allow-Origin", "*")
c.Writer.Header().Set("Access-Control-Allow-Credentials", "true")
c.Writer.Header().Set("Access-Control-Allow-Headers", "Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization, accept, origin, Cache-Control, X-Requested-With")
c.Writer.Header().Set("Access-Control-Allow-Headers", strings.Join(allowheader, ", "))
if len(exposeheader) > 0 {
c.Writer.Header().Set("Access-Control-Expose-Headers", strings.Join(exposeheader, ", "))
}
c.Writer.Header().Set("Access-Control-Allow-Methods", "OPTIONS, GET, POST, PUT, PATCH, DELETE, COUNT")
if c.Request.Method == "OPTIONS" {


@@ -19,13 +19,18 @@ type GinWrapper struct {
engine *gin.Engine
suppressGinLogs bool
opt Options
allowCors bool
corsAllowHeader []string
corsExposeHeader []string
ginDebug bool
bufferBody bool
requestTimeout time.Duration
listenerBeforeRequest []func(g *gin.Context)
listenerAfterRequest []func(g *gin.Context, resp HTTPResponse)
buildRequestBindError func(g *gin.Context, fieldtype string, err error) HTTPResponse
routeSpecs []ginRouteSpec
}
@@ -37,53 +42,64 @@ type ginRouteSpec struct {
}
type Options struct {
AllowCors *bool // Add cors handler to allow all CORS requests on the default http methods
GinDebug *bool // Set gin.debug to true (adds more logs)
SuppressGinLogs *bool // Suppress our custom gin logs (even if GinDebug == true)
BufferBody *bool // Buffers the input body stream, this way the ginext error handler can later include the whole request body
Timeout *time.Duration // The default handler timeout
ListenerBeforeRequest []func(g *gin.Context) // Register listener that are called before the handler method
ListenerAfterRequest []func(g *gin.Context, resp HTTPResponse) // Register listener that are called after the handler method
AllowCors *bool // Add cors handler to allow all CORS requests on the default http methods
CorsAllowHeader *[]string // override the default values of Access-Control-Allow-Headers (AllowCors must be true)
CorsExposeHeader *[]string // return Access-Control-Expose-Headers (AllowCors must be true)
GinDebug *bool // Set gin.debug to true (adds more logs)
SuppressGinLogs *bool // Suppress our custom gin logs (even if GinDebug == true)
BufferBody *bool // Buffers the input body stream, this way the ginext error handler can later include the whole request body
Timeout *time.Duration // The default handler timeout
ListenerBeforeRequest []func(g *gin.Context) // Register listener that are called before the handler method
ListenerAfterRequest []func(g *gin.Context, resp HTTPResponse) // Register listener that are called after the handler method
DebugTrimHandlerPrefixes []string // Trim these prefixes from the handler names in the debug print
DebugReplaceHandlerNames map[string]string // Replace handler names in debug output
BuildRequestBindError func(g *gin.Context, fieldtype string, err error) HTTPResponse // Override function which generates the HTTPResponse errors that are returned by the preContext...Start() methods
}
// NewEngine creates a new (wrapped) ginEngine
func NewEngine(opt Options) *GinWrapper {
ginDebug := langext.Coalesce(opt.GinDebug, true)
if ginDebug {
gin.SetMode(gin.DebugMode)
// do not debug-print routes
gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {}
} else {
gin.SetMode(gin.ReleaseMode)
// do not debug-print routes
gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {}
}
engine := gin.New()
wrapper := &GinWrapper{
engine: engine,
opt: opt,
suppressGinLogs: langext.Coalesce(opt.SuppressGinLogs, false),
allowCors: langext.Coalesce(opt.AllowCors, false),
ginDebug: langext.Coalesce(opt.GinDebug, true),
corsAllowHeader: langext.Coalesce(opt.CorsAllowHeader, []string{"Content-Type", "Content-Length", "Accept-Encoding", "X-CSRF-Token", "Authorization", "accept", "origin", "Cache-Control", "X-Requested-With"}),
corsExposeHeader: langext.Coalesce(opt.CorsExposeHeader, []string{}),
ginDebug: ginDebug,
bufferBody: langext.Coalesce(opt.BufferBody, false),
requestTimeout: langext.Coalesce(opt.Timeout, 24*time.Hour),
listenerBeforeRequest: opt.ListenerBeforeRequest,
listenerAfterRequest: opt.ListenerAfterRequest,
buildRequestBindError: langext.Conditional(opt.BuildRequestBindError == nil, defaultBuildRequestBindError, opt.BuildRequestBindError),
}
engine.RedirectFixedPath = false
engine.RedirectTrailingSlash = false
if wrapper.allowCors {
engine.Use(CorsMiddleware())
engine.Use(CorsMiddleware(wrapper.corsAllowHeader, wrapper.corsExposeHeader))
}
// do not debug-print routes
gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {}
if !wrapper.ginDebug {
gin.SetMode(gin.ReleaseMode)
if !wrapper.suppressGinLogs {
ginlogger := gin.Logger()
engine.Use(func(context *gin.Context) {
ginlogger(context)
})
}
} else {
gin.SetMode(gin.DebugMode)
if ginDebug && !wrapper.suppressGinLogs {
ginlogger := gin.Logger()
engine.Use(func(context *gin.Context) { ginlogger(context) })
}
return wrapper
}
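
A hedged construction sketch for the new CORS options; the header lists are illustrative and langext.PTrue is assumed to exist analogous to langext.PFalse.

func newAPIEngine() *ginext.GinWrapper {
	return ginext.NewEngine(ginext.Options{
		AllowCors:        langext.PTrue,
		CorsAllowHeader:  &[]string{"Content-Type", "Authorization", "X-Request-ID"},
		CorsExposeHeader: &[]string{"X-Total-Count"},
	})
}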
@@ -185,6 +201,18 @@ func (w *GinWrapper) cleanMiddlewareName(fname string) string {
}
}
for _, pfx := range w.opt.DebugTrimHandlerPrefixes {
if strings.HasPrefix(fname, pfx) {
fname = fname[len(pfx):]
}
}
for k, v := range langext.ForceMap(w.opt.DebugReplaceHandlerNames) {
if strings.EqualFold(fname, k) {
fname = v
}
}
return fname
}
@@ -200,3 +228,11 @@ func (w *GinWrapper) ServeHTTP(req *http.Request) *httptest.ResponseRecorder {
func (w *GinWrapper) ForwardRequest(writer http.ResponseWriter, req *http.Request) {
w.engine.ServeHTTP(writer, req)
}
func (w *GinWrapper) ListRoutes() []gin.RouteInfo {
return w.engine.Routes()
}
func defaultBuildRequestBindError(g *gin.Context, fieldtype string, err error) HTTPResponse {
return Error(err)
}
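
A hedged sketch of overriding the bind-error response; fieldtype is one of the literals passed in Start() ("URI", "QUERY", "JSON", "FORM", "HEADER", "BODY", "INIT"), the JSON shape is illustrative, and net/http is assumed imported.

func newStrictEngine() *ginext.GinWrapper {
	return ginext.NewEngine(ginext.Options{
		BuildRequestBindError: func(g *gin.Context, fieldtype string, err error) ginext.HTTPResponse {
			return ginext.JSON(http.StatusBadRequest, gin.H{"error": "invalid " + fieldtype + " data"})
		},
	})
}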

ginext/jsonFilter.go Normal file

@@ -0,0 +1,9 @@
package ginext
import "github.com/gin-gonic/gin"
var jsonFilterKey = "goext.jsonfilter"
func SetJSONFilter(g *gin.Context, filter string) {
g.Set(jsonFilterKey, filter)
}
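
A hedged handler sketch; the filter name and the handler shape are hypothetical, but the key written here is the same "goext.jsonfilter" string that jsonHTTPResponse.jsonRenderer reads before rendering.

func listUsersHandler(g *gin.Context) ginext.HTTPResponse {
	ginext.SetJSONFilter(g, "short")     // hypothetical filter understood by the response structs
	return ginext.JSON(200, loadUsers()) // loadUsers is a hypothetical data source
}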


@@ -15,16 +15,17 @@ import (
)
type PreContext struct {
ginCtx *gin.Context
wrapper *GinWrapper
uri any
query any
body any
rawbody *[]byte
form any
header any
timeout *time.Duration
persistantData *preContextData // must be a ptr, so that we can get the values back in our Wrap func
ginCtx *gin.Context
wrapper *GinWrapper
uri any
query any
body any
rawbody *[]byte
form any
header any
timeout *time.Duration
persistantData *preContextData // must be a ptr, so that we can get the values back in our Wrap func
ignoreWrongContentType bool
}
type preContextData struct {
@@ -71,6 +72,11 @@ func (pctx *PreContext) WithSession(sessionObj SessionObject) *PreContext {
return pctx
}
func (pctx *PreContext) IgnoreWrongContentType() *PreContext {
pctx.ignoreWrongContentType = true
return pctx
}
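
A hedged sketch of opting out of the missing-body errors for endpoints where the JSON/form body is optional; the handler signature and how pctx is obtained are assumptions, only IgnoreWrongContentType/Start/Cancel are taken from this file.

func optionalBodyHandler(pctx ginext.PreContext) ginext.HTTPResponse {
	ctx, g, errResp := pctx.IgnoreWrongContentType().Start()
	if errResp != nil {
		return *errResp
	}
	defer ctx.Cancel()

	_ = g // gin context available if needed
	return ginext.Status(204)
}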
func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
if pctx.uri != nil {
if err := pctx.ginCtx.ShouldBindUri(pctx.uri); err != nil {
@@ -78,7 +84,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailURI).
Str("struct_type", fmt.Sprintf("%T", pctx.uri)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "URI", err))
}
}
@@ -88,7 +94,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailQuery).
Str("struct_type", fmt.Sprintf("%T", pctx.query)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "QUERY", err))
}
}
@@ -99,13 +105,15 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailJSON).
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "JSON", err))
}
} else {
err := exerr.New(exerr.TypeBindFailJSON, "missing JSON body").
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
Build()
return nil, nil, langext.Ptr(Error(err))
if !pctx.ignoreWrongContentType {
err := exerr.New(exerr.TypeBindFailJSON, "missing JSON body").
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
Build()
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "JSON", err))
}
}
}
@@ -113,14 +121,14 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
if brc, ok := pctx.ginCtx.Request.Body.(dataext.BufferedReadCloser); ok {
v, err := brc.BufferedAll()
if err != nil {
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "BODY", err))
}
*pctx.rawbody = v
} else {
buf := &bytes.Buffer{}
_, err := io.Copy(buf, pctx.ginCtx.Request.Body)
if err != nil {
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "BODY", err))
}
*pctx.rawbody = buf.Bytes()
}
@@ -133,7 +141,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailFormData).
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "FORM", err))
}
} else if pctx.ginCtx.ContentType() == "application/x-www-form-urlencoded" {
if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil {
@@ -141,13 +149,15 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailFormData).
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "FORM", err))
}
} else {
err := exerr.New(exerr.TypeBindFailFormData, "missing form body").
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(Error(err))
if !pctx.ignoreWrongContentType {
err := exerr.New(exerr.TypeBindFailFormData, "missing form body").
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "FORM", err))
}
}
}
@@ -157,7 +167,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailHeader).
Str("struct_type", fmt.Sprintf("%T", pctx.query)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "HEADER", err))
}
}
@@ -169,7 +179,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
err := pctx.persistantData.sessionObj.Init(pctx.ginCtx, actx)
if err != nil {
actx.Cancel()
return nil, nil, langext.Ptr(Error(exerr.Wrap(err, "Failed to init session").Build()))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "INIT", err))
}
}


@@ -1,12 +1,8 @@
package ginext
import (
"fmt"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"os"
)
type cookieval struct {
@@ -40,461 +36,10 @@ type InspectableHTTPResponse interface {
Headers() []string
}
type jsonHTTPResponse struct {
statusCode int
data any
headers []headerval
cookies []cookieval
}
type HTTPErrorResponse interface {
HTTPResponse
func (j jsonHTTPResponse) jsonRenderer(g *gin.Context) json.GoJsonRender {
var f *string
if jsonfilter := g.GetString("goext.jsonfilter"); jsonfilter != "" {
f = &jsonfilter
}
return json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true, Filter: f}
}
func (j jsonHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Render(j.statusCode, j.jsonRenderer(g))
}
func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j jsonHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j jsonHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j jsonHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j jsonHTTPResponse) BodyString(g *gin.Context) *string {
if str, err := j.jsonRenderer(g).RenderString(); err == nil {
return &str
} else {
return nil
}
}
func (j jsonHTTPResponse) ContentType() string {
return "application/json"
}
func (j jsonHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type emptyHTTPResponse struct {
statusCode int
headers []headerval
cookies []cookieval
}
func (j emptyHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Status(j.statusCode)
}
func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j emptyHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j emptyHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j emptyHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j emptyHTTPResponse) BodyString(*gin.Context) *string {
return nil
}
func (j emptyHTTPResponse) ContentType() string {
return ""
}
func (j emptyHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type textHTTPResponse struct {
statusCode int
data string
headers []headerval
cookies []cookieval
}
func (j textHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.String(j.statusCode, "%s", j.data)
}
func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j textHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j textHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j textHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j textHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(j.data)
}
func (j textHTTPResponse) ContentType() string {
return "text/plain"
}
func (j textHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type dataHTTPResponse struct {
statusCode int
data []byte
contentType string
headers []headerval
cookies []cookieval
}
func (j dataHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Data(j.statusCode, j.contentType, j.data)
}
func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j dataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j dataHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j dataHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j dataHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(string(j.data))
}
func (j dataHTTPResponse) ContentType() string {
return j.contentType
}
func (j dataHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type fileHTTPResponse struct {
mimetype string
filepath string
filename *string
headers []headerval
cookies []cookieval
}
func (j fileHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
if j.filename != nil {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.File(j.filepath)
}
func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j fileHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j fileHTTPResponse) IsSuccess() bool {
return true
}
func (j fileHTTPResponse) Statuscode() int {
return 200
}
func (j fileHTTPResponse) BodyString(*gin.Context) *string {
data, err := os.ReadFile(j.filepath)
if err != nil {
return nil
}
return langext.Ptr(string(data))
}
func (j fileHTTPResponse) ContentType() string {
return j.mimetype
}
func (j fileHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type downloadDataHTTPResponse struct {
statusCode int
mimetype string
data []byte
filename *string
headers []headerval
cookies []cookieval
}
func (j downloadDataHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
if j.filename != nil {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Data(j.statusCode, j.mimetype, j.data)
}
func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j downloadDataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j downloadDataHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j downloadDataHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j downloadDataHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(string(j.data))
}
func (j downloadDataHTTPResponse) ContentType() string {
return j.mimetype
}
func (j downloadDataHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type redirectHTTPResponse struct {
statusCode int
url string
headers []headerval
cookies []cookieval
}
func (j redirectHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Redirect(j.statusCode, j.url)
}
func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j redirectHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j redirectHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j redirectHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j redirectHTTPResponse) BodyString(*gin.Context) *string {
return nil
}
func (j redirectHTTPResponse) ContentType() string {
return ""
}
func (j redirectHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type jsonAPIErrResponse struct {
err *exerr.ExErr
headers []headerval
cookies []cookieval
}
func (j jsonAPIErrResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
j.err.Output(g)
j.err.CallListener(exerr.MethodOutput)
}
func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j jsonAPIErrResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j jsonAPIErrResponse) IsSuccess() bool {
return false
}
func (j jsonAPIErrResponse) Statuscode() int {
return langext.Coalesce(j.err.RecursiveStatuscode(), 0)
}
func (j jsonAPIErrResponse) BodyString(*gin.Context) *string {
if str, err := j.err.ToDefaultAPIJson(); err == nil {
return &str
} else {
return nil
}
}
func (j jsonAPIErrResponse) ContentType() string {
return "application/json"
}
func (j jsonAPIErrResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func (j jsonAPIErrResponse) Unwrap() error {
return j.err
}
func Status(sc int) HTTPResponse {
return &emptyHTTPResponse{statusCode: sc}
}
func JSON(sc int, data any) HTTPResponse {
return &jsonHTTPResponse{statusCode: sc, data: data}
}
func Data(sc int, contentType string, data []byte) HTTPResponse {
return &dataHTTPResponse{statusCode: sc, contentType: contentType, data: data}
}
func Text(sc int, data string) HTTPResponse {
return &textHTTPResponse{statusCode: sc, data: data}
}
func File(mimetype string, filepath string) HTTPResponse {
return &fileHTTPResponse{mimetype: mimetype, filepath: filepath}
}
func Download(mimetype string, filepath string, filename string) HTTPResponse {
return &fileHTTPResponse{mimetype: mimetype, filepath: filepath, filename: &filename}
}
func DownloadData(status int, mimetype string, filename string, data []byte) HTTPResponse {
return &downloadDataHTTPResponse{statusCode: status, mimetype: mimetype, data: data, filename: &filename}
}
func Redirect(sc int, newURL string) HTTPResponse {
return &redirectHTTPResponse{statusCode: sc, url: newURL}
}
func Error(e error) HTTPResponse {
return &jsonAPIErrResponse{
err: exerr.FromError(e),
}
}
func ErrWrap(e error, errorType exerr.ErrorType, msg string) HTTPResponse {
return &jsonAPIErrResponse{
err: exerr.FromError(exerr.Wrap(e, msg).WithType(errorType).Build()),
}
Error() error
}
func NotImplemented() HTTPResponse {

ginext/responseData.go Normal file

@@ -0,0 +1,58 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type dataHTTPResponse struct {
statusCode int
data []byte
contentType string
headers []headerval
cookies []cookieval
}
func (j dataHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Data(j.statusCode, j.contentType, j.data)
}
func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j dataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j dataHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j dataHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j dataHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(string(j.data))
}
func (j dataHTTPResponse) ContentType() string {
return j.contentType
}
func (j dataHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Data(sc int, contentType string, data []byte) HTTPResponse {
return &dataHTTPResponse{statusCode: sc, contentType: contentType, data: data}
}


@@ -0,0 +1,64 @@
package ginext
import (
"fmt"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type downloadDataHTTPResponse struct {
statusCode int
mimetype string
data []byte
filename *string
headers []headerval
cookies []cookieval
}
func (j downloadDataHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
if j.filename != nil {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Data(j.statusCode, j.mimetype, j.data)
}
func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j downloadDataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j downloadDataHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j downloadDataHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j downloadDataHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(string(j.data))
}
func (j downloadDataHTTPResponse) ContentType() string {
return j.mimetype
}
func (j downloadDataHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func DownloadData(status int, mimetype string, filename string, data []byte) HTTPResponse {
return &downloadDataHTTPResponse{statusCode: status, mimetype: mimetype, data: data, filename: &filename}
}

ginext/responseEmpty.go
@@ -0,0 +1,56 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type emptyHTTPResponse struct {
statusCode int
headers []headerval
cookies []cookieval
}
func (j emptyHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Status(j.statusCode)
}
func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j emptyHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j emptyHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j emptyHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j emptyHTTPResponse) BodyString(*gin.Context) *string {
return nil
}
func (j emptyHTTPResponse) ContentType() string {
return ""
}
func (j emptyHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Status(sc int) HTTPResponse {
return &emptyHTTPResponse{statusCode: sc}
}

ginext/responseFile.go
@@ -0,0 +1,73 @@
package ginext
import (
"fmt"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"os"
)
type fileHTTPResponse struct {
mimetype string
filepath string
filename *string
headers []headerval
cookies []cookieval
}
func (j fileHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
if j.filename != nil {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.File(j.filepath)
}
func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j fileHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j fileHTTPResponse) IsSuccess() bool {
return true
}
func (j fileHTTPResponse) Statuscode() int {
return 200
}
func (j fileHTTPResponse) BodyString(*gin.Context) *string {
data, err := os.ReadFile(j.filepath)
if err != nil {
return nil
}
return langext.Ptr(string(data))
}
func (j fileHTTPResponse) ContentType() string {
return j.mimetype
}
func (j fileHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func File(mimetype string, filepath string) HTTPResponse {
return &fileHTTPResponse{mimetype: mimetype, filepath: filepath}
}
func Download(mimetype string, filepath string, filename string) HTTPResponse {
return &fileHTTPResponse{mimetype: mimetype, filepath: filepath, filename: &filename}
}

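Sketch of the two constructors above (hypothetical paths, hypothetical helper): File serves the file inline, Download additionally sets the attachment disposition:

package ginextdemo // hypothetical package, for illustration only

import "gogs.mikescher.com/BlackForestBytes/goext/ginext"

func reportResponses() (inline ginext.HTTPResponse, attachment ginext.HTTPResponse) {
	// served inline; the explicit mimetype avoids gin's content sniffing (see Write above)
	inline = ginext.File("application/pdf", "/srv/reports/2024.pdf")

	// same file, sent with Content-Disposition: attachment; filename="report-2024.pdf"
	attachment = ginext.Download("application/pdf", "/srv/reports/2024.pdf", "report-2024.pdf")
	return inline, attachment
}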
ginext/responseJson.go
@@ -0,0 +1,78 @@
package ginext
import (
"github.com/gin-gonic/gin"
json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type jsonHTTPResponse struct {
statusCode int
data any
headers []headerval
cookies []cookieval
filterOverride *string
}
func (j jsonHTTPResponse) jsonRenderer(g *gin.Context) json.GoJsonRender {
var f *string
if jsonfilter := g.GetString(jsonFilterKey); jsonfilter != "" {
f = &jsonfilter
}
if j.filterOverride != nil {
f = j.filterOverride
}
return json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true, Filter: f}
}
func (j jsonHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Render(j.statusCode, j.jsonRenderer(g))
}
func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j jsonHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j jsonHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j jsonHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j jsonHTTPResponse) BodyString(g *gin.Context) *string {
if str, err := j.jsonRenderer(g).RenderString(); err == nil {
return &str
} else {
return nil
}
}
func (j jsonHTTPResponse) ContentType() string {
return "application/json"
}
func (j jsonHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func JSON(sc int, data any) HTTPResponse {
return &jsonHTTPResponse{statusCode: sc, data: data}
}
func JSONWithFilter(sc int, data any, f string) HTTPResponse {
return &jsonHTTPResponse{statusCode: sc, data: data, filterOverride: &f}
}

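A short sketch of JSON and JSONWithFilter with a hypothetical result struct; the per-request filter comes from WithJSONFilter (see the ginWrapper change further down), while JSONWithFilter overrides it for a single response (assuming the renderer forwards the filter to the encoder, as the code above suggests):

package ginextdemo // hypothetical package, for illustration only

import (
	"net/http"

	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

type pingResult struct {
	Message string `json:"message"`
	Secret  string `json:"secret" jsonfilter:"admin"` // only serialized when the "admin" filter is active
}

func pingResponses() (ginext.HTTPResponse, ginext.HTTPResponse) {
	res := pingResult{Message: "pong", Secret: "s3cr3t"}

	plain := ginext.JSON(http.StatusOK, res)                    // uses the filter set on the gin context, if any
	admin := ginext.JSONWithFilter(http.StatusOK, res, "admin") // forces the "admin" filter for this response

	return plain, admin
}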
ginext/responseJsonAPI.go
@@ -0,0 +1,81 @@
package ginext
import (
"context"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type jsonAPIErrResponse struct {
err *exerr.ExErr
headers []headerval
cookies []cookieval
}
func (j jsonAPIErrResponse) Error() error {
return j.err
}
func (j jsonAPIErrResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
exerr.Get(j.err).Output(context.Background(), g)
j.err.CallListener(exerr.MethodOutput)
}
func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j jsonAPIErrResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j jsonAPIErrResponse) IsSuccess() bool {
return false
}
func (j jsonAPIErrResponse) Statuscode() int {
return langext.Coalesce(j.err.RecursiveStatuscode(), 0)
}
func (j jsonAPIErrResponse) BodyString(*gin.Context) *string {
if str, err := j.err.ToDefaultAPIJson(); err == nil {
return &str
} else {
return nil
}
}
func (j jsonAPIErrResponse) ContentType() string {
return "application/json"
}
func (j jsonAPIErrResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func (j jsonAPIErrResponse) Unwrap() error {
return j.err
}
func Error(e error) HTTPResponse {
return &jsonAPIErrResponse{
err: exerr.FromError(e),
}
}
func ErrWrap(e error, errorType exerr.ErrorType, msg string) HTTPResponse {
return &jsonAPIErrResponse{
err: exerr.FromError(exerr.Wrap(e, msg).WithType(errorType).Build()),
}
}

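Sketch of turning an internal error into an API error response (hypothetical helper; exerr.TypeInternal is the same error type used in imageext further down):

package ginextdemo // hypothetical package, for illustration only

import (
	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

func respondFromError(err error) ginext.HTTPResponse {
	if err != nil {
		// wraps the error, attaches a message and an error type,
		// and lets the exerr output machinery above render it
		return ginext.ErrWrap(err, exerr.TypeInternal, "failed to load thing")
	}
	return ginext.Status(204)
}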

@@ -0,0 +1,57 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type redirectHTTPResponse struct {
statusCode int
url string
headers []headerval
cookies []cookieval
}
func (j redirectHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Redirect(j.statusCode, j.url)
}
func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j redirectHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j redirectHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j redirectHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j redirectHTTPResponse) BodyString(*gin.Context) *string {
return nil
}
func (j redirectHTTPResponse) ContentType() string {
return ""
}
func (j redirectHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Redirect(sc int, newURL string) HTTPResponse {
return &redirectHTTPResponse{statusCode: sc, url: newURL}
}


@@ -0,0 +1,72 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"io"
"net/http"
"time"
)
type seekableResponse struct {
data io.ReadSeeker
contentType string
filename string
headers []headerval
cookies []cookieval
}
func (j seekableResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.contentType) // if we don't set it here http.ServeContent does weird sniffing later...
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
http.ServeContent(g.Writer, g.Request, j.filename, time.Unix(0, 0), j.data)
if clsr, ok := j.data.(io.ReadSeekCloser); ok {
err := clsr.Close()
if err != nil {
exerr.Wrap(err, "failed to close io.ReadSeekCloser in ginext.Seekable").Str("filename", j.filename).Print()
}
}
}
func (j seekableResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j seekableResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j seekableResponse) IsSuccess() bool {
return true
}
func (j seekableResponse) Statuscode() int {
return 200
}
func (j seekableResponse) BodyString(*gin.Context) *string {
return langext.Ptr("(seekable)")
}
func (j seekableResponse) ContentType() string {
return j.contentType
}
func (j seekableResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Seekable(filename string, contentType string, data io.ReadSeeker) HTTPResponse {
return &seekableResponse{filename: filename, contentType: contentType, data: data}
}

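Sketch of serving a local file with Range-request support via the Seekable constructor above (hypothetical path; *os.File satisfies io.ReadSeeker, and because it also implements io.ReadSeekCloser it is closed by Write after serving):

package ginextdemo // hypothetical package, for illustration only

import (
	"os"

	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

func serveVideo(path string) (ginext.HTTPResponse, error) {
	fp, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	// http.ServeContent (inside Write) handles Range / If-Modified-Since requests
	return ginext.Seekable("video.mp4", "video/mp4", fp), nil
}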
ginext/responseText.go
@@ -0,0 +1,57 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type textHTTPResponse struct {
statusCode int
data string
headers []headerval
cookies []cookieval
}
func (j textHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.String(j.statusCode, "%s", j.data)
}
func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j textHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j textHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j textHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j textHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(j.data)
}
func (j textHTTPResponse) ContentType() string {
return "text/plain"
}
func (j textHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Text(sc int, data string) HTTPResponse {
return &textHTTPResponse{statusCode: sc, data: data}
}


@@ -57,7 +57,7 @@ func (w *GinRoutesWrapper) Use(middleware ...gin.HandlerFunc) *GinRoutesWrapper
}
func (w *GinRoutesWrapper) WithJSONFilter(filter string) *GinRoutesWrapper {
return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) })
return w.Use(func(g *gin.Context) { g.Set(jsonFilterKey, filter) })
}
func (w *GinRoutesWrapper) GET(relativePath string) *GinRouteBuilder {
@@ -112,7 +112,7 @@ func (w *GinRouteBuilder) Use(middleware ...gin.HandlerFunc) *GinRouteBuilder {
}
func (w *GinRouteBuilder) WithJSONFilter(filter string) *GinRouteBuilder {
return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) })
return w.Use(func(g *gin.Context) { g.Set(jsonFilterKey, filter) })
}
func (w *GinRouteBuilder) Handle(handler WHandlerFunc) {

go.mod
@@ -1,54 +1,61 @@
module gogs.mikescher.com/BlackForestBytes/goext
go 1.22
go 1.23
require (
github.com/gin-gonic/gin v1.9.1
github.com/gin-gonic/gin v1.10.0
github.com/glebarez/go-sqlite v1.22.0 // only needed for tests -.-
github.com/jmoiron/sqlx v1.3.5
github.com/rs/xid v1.5.0
github.com/rs/zerolog v1.32.0
go.mongodb.org/mongo-driver v1.14.0
golang.org/x/crypto v0.21.0
golang.org/x/sys v0.18.0
golang.org/x/term v0.18.0
github.com/jmoiron/sqlx v1.4.0
github.com/rs/xid v1.6.0
github.com/rs/zerolog v1.33.0
go.mongodb.org/mongo-driver v1.17.1
golang.org/x/crypto v0.28.0
golang.org/x/sys v0.26.0
golang.org/x/term v0.25.0
)
require (
github.com/bytedance/sonic v1.11.3 // indirect
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
github.com/chenzhuoyu/iasm v0.9.1 // indirect
github.com/disintegration/imaging v1.6.2
github.com/jung-kurt/gofpdf v1.16.2
golang.org/x/sync v0.8.0
)
require (
github.com/bytedance/sonic v1.12.4 // indirect
github.com/bytedance/sonic/loader v0.2.1 // indirect
github.com/cloudwego/base64x v0.1.4 // indirect
github.com/cloudwego/iasm v0.2.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
github.com/gabriel-vasile/mimetype v1.4.6 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.19.0 // indirect
github.com/goccy/go-json v0.10.2 // indirect
github.com/go-playground/validator/v10 v10.22.1 // indirect
github.com/goccy/go-json v0.10.3 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/uuid v1.5.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.17.7 // indirect
github.com/klauspost/cpuid/v2 v2.2.7 // indirect
github.com/klauspost/compress v1.17.11 // indirect
github.com/klauspost/cpuid/v2 v2.2.8 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/montanaflynn/stats v0.7.1 // indirect
github.com/pelletier/go-toml/v2 v2.2.0 // indirect
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.12 // indirect
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect
golang.org/x/arch v0.7.0 // indirect
golang.org/x/net v0.22.0 // indirect
golang.org/x/sync v0.6.0 // indirect
golang.org/x/text v0.14.0 // indirect
google.golang.org/protobuf v1.33.0 // indirect
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
golang.org/x/arch v0.11.0 // indirect
golang.org/x/image v0.21.0 // indirect
golang.org/x/net v0.30.0 // indirect
golang.org/x/text v0.19.0 // indirect
google.golang.org/protobuf v1.35.1 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
modernc.org/libc v1.37.6 // indirect
modernc.org/mathutil v1.6.0 // indirect

go.sum
@@ -1,34 +1,35 @@
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE=
github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.11.0 h1:FwNNv6Vu4z2Onf1++LNzxB/QhitD8wuTdpZzMTGITWo=
github.com/bytedance/sonic v1.11.0/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.11.1 h1:JC0+6c9FoWYYxakaoa+c5QTtJeiSZNeByOBhXtAFSn4=
github.com/bytedance/sonic v1.11.1/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.11.2 h1:ywfwo0a/3j9HR8wsYGWsIWl2mvRsI950HyoxiBERw5A=
github.com/bytedance/sonic v1.11.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.11.3 h1:jRN+yEjakWh8aK5FzrciUHG8OFXK+4/KrAX/ysEtHAA=
github.com/bytedance/sonic v1.11.3/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0=
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA=
github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0=
github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
github.com/bytedance/sonic v1.12.3 h1:W2MGa7RCU1QTeYRTPE3+88mVC0yXmsRQRChiyVocVjU=
github.com/bytedance/sonic v1.12.3/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic v1.12.4 h1:9Csb3c9ZJhfUWeMtpCDCq6BUoH5ogfDFLUgQ/jG+R0k=
github.com/bytedance/sonic v1.12.4/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.2.0 h1:zNprn+lsIP06C/IqCHs3gPQIvnvpKbbxyXQP1iU4kWM=
github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.2.1 h1:1GgorWTqf12TA8mma4DDSbaQigE2wOgQo7iCjjJv3+E=
github.com/bytedance/sonic/loader v0.2.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
github.com/gabriel-vasile/mimetype v1.4.5 h1:J7wGKdGu33ocBOhGy0z653k/lFKLFDPJMG8Gql0kxn4=
github.com/gabriel-vasile/mimetype v1.4.5/go.mod h1:ibHel+/kbxn9x2407k1izTA1S81ku1z/DlgOW2QE0M4=
github.com/gabriel-vasile/mimetype v1.4.6 h1:3+PzJTKLkvgjeTbts6msPJt4DixhT4YtFNf1gtGe3zc=
github.com/gabriel-vasile/mimetype v1.4.6/go.mod h1:JX1qVKqZd40hUPpAfiNTe0Sne7hdfKSbOqqmkq8GCXc=
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ=
github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
@@ -37,98 +38,80 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE=
github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74=
github.com/go-playground/validator/v10 v10.17.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-playground/validator/v10 v10.18.0 h1:BvolUXjp4zuvkZ5YN5t7ebzbhlUtPsPm2S9NAZ5nl9U=
github.com/go-playground/validator/v10 v10.18.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4=
github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA=
github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ=
github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo=
github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU=
github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4=
github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI=
github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg=
github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes=
github.com/jung-kurt/gofpdf v1.16.2 h1:jgbatWHfRlPYiK85qgevsZTHviWXKwB1TTiKdz5PtRc=
github.com/jung-kurt/gofpdf v1.16.2/go.mod h1:1hl7y57EsiPAkLbOwzpzqgx1A30nQCk/YmFV8S2vmK0=
github.com/klauspost/compress v1.17.10 h1:oXAz+Vh0PMUvJczoi+flxpnBEPxoER1IaAnU/NMPtT0=
github.com/klauspost/compress v1.17.10/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc=
github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM=
github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI=
github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI=
github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/pelletier/go-toml/v2 v2.2.0 h1:QLgLl2yMN7N+ruc31VynXs1vhMZa7CeHHejIeBAsoHo=
github.com/pelletier/go-toml/v2 v2.2.0/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
github.com/phpdave11/gofpdi v1.0.7/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc=
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A=
github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0=
github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU=
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8=
github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
@@ -141,53 +124,33 @@ github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY=
github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4=
github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk=
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4=
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk=
go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo=
go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80=
go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc=
golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
go.mongodb.org/mongo-driver v1.17.1 h1:Wic5cJIwJgSpBhe3lx3+/RybR5PiYRMpVFgO7cOHyIM=
go.mongodb.org/mongo-driver v1.17.1/go.mod h1:wwWm/+BuOddhcq3n68LKRmgk2wXzmF6s0SFOa0GINL4=
golang.org/x/arch v0.11.0 h1:KXV8WWKCXm6tRpLirl2szsO5j/oOODwZf4hATmGVNs4=
golang.org/x/arch v0.11.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k=
golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.20.0 h1:jmAMJJZXr5KiCw05dfYK9QnqaqKLYXijU23lsEdcQqg=
golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ=
golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw=
golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U=
golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.21.0 h1:c5qV36ajHpdj4Qi0GnE0jUc/yuo33OLFaa0d+crTD5s=
golang.org/x/image v0.21.0/go.mod h1:vUbsLavqK/W303ZroQQVKQ+Af3Yl6Uz1Ppu5J/cLz78=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c=
golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo=
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc=
golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4=
golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -195,39 +158,26 @@ golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo=
golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE=
golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY=
golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8=
golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24=
golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM=
golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU=
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA=
google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
@@ -242,4 +192,3 @@ modernc.org/memory v1.7.2/go.mod h1:NO4NVCQy0N7ln+T9ngWqOQfi7ley4vpwvARR+Hjw95E=
modernc.org/sqlite v1.28.0 h1:Zx+LyDDmXczNnEQdvPuEfcFVA2ZPyaD7UCZDjef3BHQ=
modernc.org/sqlite v1.28.0/go.mod h1:Qxpazz0zH8Z1xCFyi5GSL3FzbtZ3fvbjmywNogldEW0=
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=


@@ -1,5 +1,5 @@
package goext
const GoextVersion = "0.0.422"
const GoextVersion = "0.0.541"
const GoextVersionTimestamp = "2024-03-23T20:29:46+0100"
const GoextVersionTimestamp = "2024-11-05T14:38:42+0100"


@@ -788,7 +788,7 @@ FieldLoop:
if f.omitEmpty && isEmptyValue(fv) {
continue
} else if opts.filter != nil && len(f.jsonfilter) > 0 && !f.jsonfilter.Contains(*opts.filter) {
} else if !matchesJSONFilter(f.jsonfilter, opts.filter) {
continue
}
e.WriteByte(next)
@@ -808,6 +808,30 @@ FieldLoop:
}
}
func matchesJSONFilter(filter jsonfilter, value *string) bool {
if len(filter) == 0 {
return true // no filter in struct
}
if value == nil || *value == "" {
return false // field has a jsonfilter tag, but no filter was requested
}
if len(filter) == 1 && filter[0] == "-" {
return false
}
if filter.Contains(*value) {
return true
}
if filter.Contains("*") {
return true
}
return false
}
func newStructEncoder(t reflect.Type, tagkey string) encoderFunc {
se := structEncoder{fields: cachedTypeFields(t, tagkey)}
return se.encode
@@ -1333,7 +1357,7 @@ func typeFields(t reflect.Type, tagkey string) structFields {
var jsonfilter []string
jsonfilterTag := sf.Tag.Get("jsonfilter")
if jsonfilterTag != "" && jsonfilterTag != "-" {
if jsonfilterTag != "" {
jsonfilter = strings.Split(jsonfilterTag, ",")
}

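To make the matchesJSONFilter rules above concrete, here is a hypothetical struct and the fields that would be emitted for a few requested filters:

package gojsondemo // hypothetical package, for illustration only

type user struct {
	ID    string `json:"id"`                            // no tag        -> always included
	Email string `json:"email" jsonfilter:"admin,self"` // tag with list -> included for "admin" or "self"
	Star  string `json:"star" jsonfilter:"*"`           // wildcard      -> included whenever any filter is requested
	Debug string `json:"debug" jsonfilter:"-"`          // "-"           -> never included
}

// requested filter | emitted fields
// -----------------+-----------------
// (none)           | id
// "admin"          | id, email, star
// "other"          | id, star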
imageext/enums.go
@@ -0,0 +1,3 @@
package imageext
//go:generate go run ../_gen/enum-generate.go -- enums_gen.go

imageext/enums_gen.go
@@ -0,0 +1,216 @@
// Code generated by enum-generate.go DO NOT EDIT.
package imageext
import "gogs.mikescher.com/BlackForestBytes/goext/langext"
import "gogs.mikescher.com/BlackForestBytes/goext/enums"
const ChecksumEnumGenerator = "1da5383c33ee442fd0b899369053f66bdc85bed2dbf906949d3edfeedfe13340" // GoExtVersion: 0.0.449
// ================================ ImageFit ================================
//
// File: image.go
// StringEnum: true
// DescrEnum: false
// DataEnum: false
//
var __ImageFitValues = []ImageFit{
ImageFitStretch,
ImageFitCover,
ImageFitContainCenter,
ImageFitContainTopLeft,
ImageFitContainTopRight,
ImageFitContainBottomLeft,
ImageFitContainBottomRight,
}
var __ImageFitVarnames = map[ImageFit]string{
ImageFitStretch: "ImageFitStretch",
ImageFitCover: "ImageFitCover",
ImageFitContainCenter: "ImageFitContainCenter",
ImageFitContainTopLeft: "ImageFitContainTopLeft",
ImageFitContainTopRight: "ImageFitContainTopRight",
ImageFitContainBottomLeft: "ImageFitContainBottomLeft",
ImageFitContainBottomRight: "ImageFitContainBottomRight",
}
func (e ImageFit) Valid() bool {
return langext.InArray(e, __ImageFitValues)
}
func (e ImageFit) Values() []ImageFit {
return __ImageFitValues
}
func (e ImageFit) ValuesAny() []any {
return langext.ArrCastToAny(__ImageFitValues)
}
func (e ImageFit) ValuesMeta() []enums.EnumMetaValue {
return ImageFitValuesMeta()
}
func (e ImageFit) String() string {
return string(e)
}
func (e ImageFit) VarName() string {
if d, ok := __ImageFitVarnames[e]; ok {
return d
}
return ""
}
func (e ImageFit) TypeName() string {
return "ImageFit"
}
func (e ImageFit) PackageName() string {
return "media"
}
func (e ImageFit) Meta() enums.EnumMetaValue {
return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil}
}
func ParseImageFit(vv string) (ImageFit, bool) {
for _, ev := range __ImageFitValues {
if string(ev) == vv {
return ev, true
}
}
return "", false
}
func ImageFitValues() []ImageFit {
return __ImageFitValues
}
func ImageFitValuesMeta() []enums.EnumMetaValue {
return []enums.EnumMetaValue{
ImageFitStretch.Meta(),
ImageFitCover.Meta(),
ImageFitContainCenter.Meta(),
ImageFitContainTopLeft.Meta(),
ImageFitContainTopRight.Meta(),
ImageFitContainBottomLeft.Meta(),
ImageFitContainBottomRight.Meta(),
}
}
// ================================ ImageCompresson ================================
//
// File: image.go
// StringEnum: true
// DescrEnum: false
// DataEnum: false
//
var __ImageCompressonValues = []ImageCompresson{
CompressionPNGNone,
CompressionPNGSpeed,
CompressionPNGBest,
CompressionJPEG100,
CompressionJPEG90,
CompressionJPEG80,
CompressionJPEG70,
CompressionJPEG60,
CompressionJPEG50,
CompressionJPEG25,
CompressionJPEG10,
CompressionJPEG1,
}
var __ImageCompressonVarnames = map[ImageCompresson]string{
CompressionPNGNone: "CompressionPNGNone",
CompressionPNGSpeed: "CompressionPNGSpeed",
CompressionPNGBest: "CompressionPNGBest",
CompressionJPEG100: "CompressionJPEG100",
CompressionJPEG90: "CompressionJPEG90",
CompressionJPEG80: "CompressionJPEG80",
CompressionJPEG70: "CompressionJPEG70",
CompressionJPEG60: "CompressionJPEG60",
CompressionJPEG50: "CompressionJPEG50",
CompressionJPEG25: "CompressionJPEG25",
CompressionJPEG10: "CompressionJPEG10",
CompressionJPEG1: "CompressionJPEG1",
}
func (e ImageCompresson) Valid() bool {
return langext.InArray(e, __ImageCompressonValues)
}
func (e ImageCompresson) Values() []ImageCompresson {
return __ImageCompressonValues
}
func (e ImageCompresson) ValuesAny() []any {
return langext.ArrCastToAny(__ImageCompressonValues)
}
func (e ImageCompresson) ValuesMeta() []enums.EnumMetaValue {
return ImageCompressonValuesMeta()
}
func (e ImageCompresson) String() string {
return string(e)
}
func (e ImageCompresson) VarName() string {
if d, ok := __ImageCompressonVarnames[e]; ok {
return d
}
return ""
}
func (e ImageCompresson) TypeName() string {
return "ImageCompresson"
}
func (e ImageCompresson) PackageName() string {
return "media"
}
func (e ImageCompresson) Meta() enums.EnumMetaValue {
return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil}
}
func ParseImageCompresson(vv string) (ImageCompresson, bool) {
for _, ev := range __ImageCompressonValues {
if string(ev) == vv {
return ev, true
}
}
return "", false
}
func ImageCompressonValues() []ImageCompresson {
return __ImageCompressonValues
}
func ImageCompressonValuesMeta() []enums.EnumMetaValue {
return []enums.EnumMetaValue{
CompressionPNGNone.Meta(),
CompressionPNGSpeed.Meta(),
CompressionPNGBest.Meta(),
CompressionJPEG100.Meta(),
CompressionJPEG90.Meta(),
CompressionJPEG80.Meta(),
CompressionJPEG70.Meta(),
CompressionJPEG60.Meta(),
CompressionJPEG50.Meta(),
CompressionJPEG25.Meta(),
CompressionJPEG10.Meta(),
CompressionJPEG1.Meta(),
}
}
// ================================ ================= ================================
func AllPackageEnums() []enums.Enum {
return []enums.Enum{
ImageFitStretch, // ImageFit
CompressionPNGNone, // ImageCompresson
}
}

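The generated API can be used roughly like this (sketch with a hypothetical helper in a caller package):

package imagedemo // hypothetical package, for illustration only

import "gogs.mikescher.com/BlackForestBytes/goext/imageext"

func parseFitOrDefault(s string) imageext.ImageFit {
	if fit, ok := imageext.ParseImageFit(s); ok {
		return fit // e.g. "COVER" -> ImageFitCover
	}
	return imageext.ImageFitContainCenter // fall back to a known value
}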
imageext/image.go
@@ -0,0 +1,321 @@
package imageext
import (
"bytes"
"fmt"
"github.com/disintegration/imaging"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/mathext"
"image"
"image/color"
"image/draw"
"image/jpeg"
"image/png"
"io"
"math"
)
type ImageFit string //@enum:type
const (
ImageFitStretch ImageFit = "STRETCH"
ImageFitCover ImageFit = "COVER"
ImageFitContainCenter ImageFit = "CONTAIN_CENTER"
ImageFitContainTopLeft ImageFit = "CONTAIN_TOPLEFT"
ImageFitContainTopRight ImageFit = "CONTAIN_TOPRIGHT"
ImageFitContainBottomLeft ImageFit = "CONTAIN_BOTTOMLEFT"
ImageFitContainBottomRight ImageFit = "CONTAIN_BOTTOMRIGHT"
)
type ImageCrop struct { // all crop values are percentages!
CropX float64 `bson:"cropX" json:"cropX"`
CropY float64 `bson:"cropY" json:"cropY"`
CropWidth float64 `bson:"cropWidth" json:"cropWidth"`
CropHeight float64 `bson:"cropHeight" json:"cropHeight"`
}
type ImageCompresson string //@enum:type
const (
CompressionPNGNone ImageCompresson = "PNG_NONE"
CompressionPNGSpeed ImageCompresson = "PNG_SPEED"
CompressionPNGBest ImageCompresson = "PNG_BEST"
CompressionJPEG100 ImageCompresson = "JPEG_100"
CompressionJPEG90 ImageCompresson = "JPEG_090"
CompressionJPEG80 ImageCompresson = "JPEG_080"
CompressionJPEG70 ImageCompresson = "JPEG_070"
CompressionJPEG60 ImageCompresson = "JPEG_060"
CompressionJPEG50 ImageCompresson = "JPEG_050"
CompressionJPEG25 ImageCompresson = "JPEG_025"
CompressionJPEG10 ImageCompresson = "JPEG_010"
CompressionJPEG1 ImageCompresson = "JPEG_001"
)
func CropImage(img image.Image, px float64, py float64, pw float64, ph float64) (image.Image, error) {
type subImager interface {
SubImage(r image.Rectangle) image.Image
}
x := int(float64(img.Bounds().Dx()) * px)
y := int(float64(img.Bounds().Dy()) * py)
w := int(float64(img.Bounds().Dx()) * pw)
h := int(float64(img.Bounds().Dy()) * ph)
if simg, ok := img.(subImager); ok {
return simg.SubImage(image.Rect(x, y, x+w, y+h)), nil
} else {
bfr1 := bytes.Buffer{}
err := png.Encode(&bfr1, img)
if err != nil {
return nil, exerr.Wrap(err, "").Build()
}
imgPNG, err := png.Decode(&bfr1)
if err != nil {
return nil, exerr.Wrap(err, "").Build()
}
return imgPNG.(subImager).SubImage(image.Rect(x, y, x+w, y+h)), nil
}
}
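// Usage sketch (illustrative values): the crop parameters are fractions of the
// source dimensions, not pixels. E.g. the centered 50% x 50% region:
//
//	cropped, err := CropImage(src, 0.25, 0.25, 0.50, 0.50)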
func EncodeImage(img image.Image, compression ImageCompresson) (bytes.Buffer, string, error) {
var err error
bfr := bytes.Buffer{}
switch compression {
case CompressionPNGNone:
enc := &png.Encoder{CompressionLevel: png.NoCompression}
err = enc.Encode(&bfr, img)
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/png", nil
case CompressionPNGSpeed:
enc := &png.Encoder{CompressionLevel: png.BestSpeed}
err = enc.Encode(&bfr, img)
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/png", nil
case CompressionPNGBest:
enc := &png.Encoder{CompressionLevel: png.BestCompression}
err = enc.Encode(&bfr, img)
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/png", nil
case CompressionJPEG100:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 100})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG90:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 90})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG80:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 80})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG70:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 70})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG60:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 60})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG50:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 50})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG25:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 25})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG10:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 10})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG1:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 1})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
default:
return bytes.Buffer{}, "", exerr.New(exerr.TypeInternal, "unknown compression method: "+compression.String()).Build()
}
}
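// Usage sketch (illustrative): re-encode an image as ~80%-quality JPEG; the
// returned string is the matching Content-Type:
//
//	buf, mime, err := EncodeImage(img, CompressionJPEG80) // mime == "image/jpeg"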
func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fillColor color.Color) (image.Image, PercentageRectangle, error) {
iw := img.Bounds().Size().X
ih := img.Bounds().Size().Y
// [iw, ih] is the size of the image
// [bbw, bbh] is the target bounding box,
// - it specifies the target ratio
// - and the maximal target resolution
facW := float64(iw) / bbw
facH := float64(ih) / bbh
// facW is the ratio between iw and bbw
// - it is the factor by which the bounding box must be multiplied to reach the image size (in the x-axis)
//
// (same is true for facH, but for the height and y-axis)
if fit == ImageFitCover {
// image-fit:cover completely fills the target-bounding-box, it potentially cuts parts of the image away
// we use the smaller (!) value of facW and facH, because we want to have the smallest possible destination rect (due to file size)
// and because the image is made to completely fill the bounding-box, the smaller factor (= the dimension the image is stretched more) is relevant
// but we cap `fac` at 1 (can be larger than 1)
// a value >1 would mean the final image resolution is bigger than the bounding box, which we do not want.
// if the initial image (iw, ih) is already bigger than the bounding box (bbw, bbh), facW and facH are always >1 and fac will be 1
// which means we will simply use the bounding box as destination rect (and scale the image down)
fac := mathext.Clamp(mathext.Min(facW, facH), 0.0, 1.0)
// we scale the bounding box by fac (both dimensions by the same amount, to keep the bounding-box ratio)
w := int(math.Round(bbw * fac))
h := int(math.Round(bbh * fac))
img = imaging.Fill(img, w, h, imaging.Center, imaging.Lanczos)
newImg := image.NewRGBA(image.Rect(0, 0, w, h))
draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
draw.Draw(newImg, newImg.Bounds(), img, image.Pt(0, 0), draw.Over)
return newImg, PercentageRectangle{0, 0, 1, 1}, nil
}
if fit == ImageFitContainCenter || fit == ImageFitContainTopLeft || fit == ImageFitContainTopRight || fit == ImageFitContainBottomLeft || fit == ImageFitContainBottomRight {
// image-fit:contain fits the whole image into the target-bounding-box; there is potentially empty space (filled with fillColor), but no part of the image is cut away
// we use the bigger (!) value of facW and facH,
// because the image is made to fit the bounding-box, the bigger factor (= the dimension the image is stretched less) is relevant
// but we cap `fac` at 1 (can be larger than 1)
// a value >1 would mean the final image resolution is bigger than the bounding box, which we do not want.
// if the initial image (iw, ih) is already bigger than the bounding box (bbw, bbh), facW and facH are always >1 and fac will be 1
// which means we will simply use the bounding box as destination rect (and scale the image down)
facOut := mathext.Clamp(mathext.Max(facW, facH), 0.0, 1.0)
// we scale the bounding box by fac (both dimensions by the same amount, to keep the bounding-box ratio)
// [ow|oh] ==> size of output image (same ratio as bounding box [bbw|bbh])
ow := int(math.Round(bbw * facOut))
oh := int(math.Round(bbh * facOut))
facScale := mathext.Min(float64(ow)/float64(iw), float64(oh)/float64(ih))
// [dw|dh] ==> size of destination rect (where to draw source in output image) (same ratio as input image [iw|ih])
dw := int(math.Round(float64(iw) * facScale))
dh := int(math.Round(float64(ih) * facScale))
img = imaging.Resize(img, dw, dh, imaging.Lanczos)
var destBounds image.Rectangle
if fit == ImageFitContainCenter {
destBounds = image.Rect((ow-dw)/2, (oh-dh)/2, (ow-dw)/2+dw, (oh-dh)/2+dh)
} else if fit == ImageFitContainTopLeft {
destBounds = image.Rect(0, 0, dw, dh)
} else if fit == ImageFitContainTopRight {
destBounds = image.Rect(ow-dw, 0, ow, dh)
} else if fit == ImageFitContainBottomLeft {
destBounds = image.Rect(0, oh-dh, dw, oh)
} else if fit == ImageFitContainBottomRight {
destBounds = image.Rect(ow-dw, oh-dh, ow, oh)
}
newImg := image.NewRGBA(image.Rect(0, 0, ow, oh))
draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
draw.Draw(newImg, destBounds, img, image.Pt(0, 0), draw.Over)
return newImg, calcRelativeRect(destBounds, newImg.Bounds()), nil
}
if fit == ImageFitStretch {
// image-fit:stretch simply stretches the image to the bounding box
// we use the bigger value of [facW;facH], to (potentially) scale the bounding box down before applying it
// theoretically we could directly use [bbw, bbh] in the call to imaging.Resize,
// but if the image is (a lot) smaller than the bounding box it is useful to scale it down to reduce the final pdf filesize
// we also cap fac at 1, because we never want the final rect to be bigger than the inputted bounding box (see comments at start of method)
fac := mathext.Clamp(mathext.Max(facW, facH), 0.0, 1.0)
// we scale the bounding box by fac (both dimensions by the same amount, to keep the bounding-box ratio)
w := int(math.Round(bbw * fac))
h := int(math.Round(bbh * fac))
img = imaging.Resize(img, w, h, imaging.Lanczos)
newImg := image.NewRGBA(image.Rect(0, 0, w, h))
draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
draw.Draw(newImg, newImg.Bounds(), img, image.Pt(0, 0), draw.Over)
return newImg, PercentageRectangle{0, 0, 1, 1}, nil
}
return nil, PercentageRectangle{}, exerr.New(exerr.TypeInternal, fmt.Sprintf("unknown image-fit: '%s'", fit)).Build()
}
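A hedged usage sketch of the contain case (bounding-box size, fill colour and source size are illustrative): for a 1000x1000 source and an 800x600 box, facScale becomes 0.6, the source is resized to 600x600 and centered, and the returned PercentageRectangle is {X: 0.125, Y: 0, W: 0.75, H: 1}.

	// Sketch: letterbox an image into an 800x600 box (contain-center, white background).
	fitted, pos, err := imageext.ObjectFitImage(src, 800, 600, imageext.ImageFitContainCenter, color.White)
	if err != nil {
		return err
	}
	// For a 1000x1000 `src`: fitted is 800x600 and pos == PercentageRectangle{X: 0.125, Y: 0, W: 0.75, H: 1},
	// i.e. the image occupies the middle 600px of the output width.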
func VerifyAndDecodeImage(data io.Reader, mime string) (image.Image, error) {
if mime == "image/jpeg" {
img, err := jpeg.Decode(data)
if err != nil {
return nil, exerr.Wrap(err, "failed to decode blob as jpeg").WithType(exerr.TypeInvalidImage).Build()
}
return img, nil
}
if mime == "image/png" {
img, err := png.Decode(data)
if err != nil {
return nil, exerr.Wrap(err, "failed to decode blob as png").WithType(exerr.TypeInvalidImage).Build()
}
return img, nil
}
return nil, exerr.New(exerr.TypeInvalidMimeType, fmt.Sprintf("unknown/invalid image mimetype: '%s'", mime)).Build()
}

35
imageext/types.go Normal file
View File

@@ -0,0 +1,35 @@
package imageext
import "image"
type Rectangle struct {
X float64
Y float64
W float64
H float64
}
type PercentageRectangle struct {
X float64 // [0..1]
Y float64 // [0..1]
W float64 // [0..1]
H float64 // [0..1]
}
func (r PercentageRectangle) Of(ref Rectangle) Rectangle {
return Rectangle{
X: ref.X + r.X*ref.W,
Y: ref.Y + r.Y*ref.H,
W: r.W * ref.W,
H: r.H * ref.H,
}
}
func calcRelativeRect(inner image.Rectangle, outer image.Rectangle) PercentageRectangle {
return PercentageRectangle{
X: float64(inner.Min.X-outer.Min.X) / float64(outer.Dx()),
Y: float64(inner.Min.Y-outer.Min.Y) / float64(outer.Dy()),
W: float64(inner.Dx()) / float64(outer.Dx()),
H: float64(inner.Dy()) / float64(outer.Dy()),
}
}
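A short sketch of how the two types compose (numbers continue the contain-center example above and are purely illustrative):

	// Sketch: map the relative position returned by ObjectFitImage back into absolute coordinates.
	rel := imageext.PercentageRectangle{X: 0.125, Y: 0, W: 0.75, H: 1}
	page := imageext.Rectangle{X: 10, Y: 20, W: 400, H: 300}
	abs := rel.Of(page)
	// abs == Rectangle{X: 60, Y: 20, W: 300, H: 300}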

View File

@@ -59,6 +59,18 @@ func ArrUnique[T comparable](array []T) []T {
return result
}
func ArrUniqueStable[T comparable](array []T) []T {
hist := make(map[T]bool, len(array))
result := make([]T, 0, len(array))
for _, v := range array {
if _, ok := hist[v]; !ok {
hist[v] = true
result = append(result, v)
}
}
return result
}
func ArrEqualsExact[T comparable](arr1 []T, arr2 []T) bool {
if len(arr1) != len(arr2) {
return false
@@ -311,6 +323,16 @@ func ArrMap[T1 any, T2 any](arr []T1, conv func(v T1) T2) []T2 {
return r
}
func ArrDeRef[T1 any](arr []*T1) []T1 {
r := make([]T1, 0, len(arr))
for _, v := range arr {
if v != nil {
r = append(r, *v)
}
}
return r
}
func MapMap[TK comparable, TV any, TR any](inmap map[TK]TV, conv func(k TK, v TV) TR) []TR {
r := make([]TR, 0, len(inmap))
for k, v := range inmap {
@@ -453,6 +475,26 @@ func ArrConcat[T any](arr ...[]T) []T {
return r
}
// ArrAppend works similarly to append(x, y, z) - but does not touch the old array and instead creates a new one
func ArrAppend[T any](arr []T, add ...T) []T {
r := ArrCopy(arr)
for _, v := range add {
r = append(r, v)
}
return r
}
// ArrPrepend works similarly to append(x, y, z) - but does not touch the old array and instead creates a new one
// Also - in contrast to ArrAppend - the add values are inserted at the start of the resulting array (in reverse order)
func ArrPrepend[T any](arr []T, add ...T) []T {
out := make([]T, len(arr)+len(add))
copy(out[len(add):], arr)
for i := 0; i < len(add); i++ {
out[len(add)-i-1] = add[i]
}
return out
}
// ArrCopy does a shallow copy of the 'in' array
func ArrCopy[T any](in []T) []T {
out := make([]T, len(in))
@@ -468,6 +510,10 @@ func ArrRemove[T comparable](arr []T, needle T) []T {
return arr
}
func ArrRemoveAt[T any](arr []T, idx int) []T {
return append(arr[:idx], arr[idx+1:]...)
}
func ArrExcept[T comparable](arr []T, needles ...T) []T {
r := make([]T, 0, len(arr))
rmlist := ArrToSet(needles)
@@ -528,3 +574,18 @@ func ArrChunk[T any](arr []T, chunkSize int) [][]T {
return res
}
func ArrGroupBy[T1 any, T2 comparable](arr []T1, groupfunc func(v T1) T2) map[T2][]T1 {
r := make(map[T2][]T1)
for _, v := range arr {
key := groupfunc(v)
if _, ok := r[key]; ok {
r[key] = append(r[key], v)
} else {
r[key] = []T1{v}
}
}
return r
}
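A hedged usage sketch of ArrGroupBy (values and key function are illustrative):

	// Sketch: group integers by parity.
	groups := langext.ArrGroupBy([]int{1, 2, 3, 4, 5}, func(v int) string {
		if v%2 == 0 {
			return "even"
		}
		return "odd"
	})
	// groups == map[string][]int{"odd": {1, 3, 5}, "even": {2, 4}}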

View File

@@ -2,6 +2,7 @@ package langext
import (
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"strings"
"testing"
)
@@ -10,3 +11,13 @@ func TestJoinString(t *testing.T) {
res := JoinString(ids, ",")
tst.AssertEqual(t, res, "1,2,3")
}
func TestArrPrepend(t *testing.T) {
v1 := []string{"1", "2", "3"}
v2 := ArrPrepend(v1, "4", "5", "6")
tst.AssertEqual(t, strings.Join(v1, ""), "123")
tst.AssertEqual(t, strings.Join(v2, ""), "654123")
}

View File

@@ -77,6 +77,14 @@ func Coalesce4Opt[T any](v1 *T, v2 *T, v3 *T, v4 *T) *T {
return v4
}
func CoalesceDblPtr[T any](v1 **T, v2 *T) *T {
if v1 != nil {
return *v1
}
return v2
}
func CoalesceString(s *string, def string) string {
if s == nil {
return def
@@ -125,6 +133,31 @@ func CoalesceStringer(s fmt.Stringer, def string) string {
}
}
func CoalesceDefault[T comparable](v1 T, def T) T {
if v1 != *new(T) {
return v1
}
return def
}
func CoalesceDefaultArr[T comparable](v1 T, vMore ...T) T {
if v1 != *new(T) {
return v1
}
if len(vMore) == 0 {
return v1
}
for i := 0; i < len(vMore)-1; i++ {
if vMore[i] != *new(T) {
return vMore[i]
}
}
return vMore[len(vMore)-1]
}
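A hedged sketch of the intended behaviour (values are illustrative): CoalesceDefault falls back to the default when the first argument equals the type's zero value, and CoalesceDefaultArr walks the extra values in order.

	// Sketch: zero-value based coalescing.
	port := langext.CoalesceDefault(0, 8080)               // == 8080 (0 is the zero value of int)
	host := langext.CoalesceDefault("db1", "localhost")    // == "db1"
	name := langext.CoalesceDefaultArr("", "", "fallback") // == "fallback"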
func SafeCast[T any](v any, def T) T {
switch r := v.(type) {
case T:

15
langext/io.go Normal file
View File

@@ -0,0 +1,15 @@
package langext
import "io"
type nopCloser struct {
io.Writer
}
func (n nopCloser) Close() error {
return nil // no op
}
func WriteNopCloser(w io.Writer) io.WriteCloser {
return nopCloser{w}
}
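A hedged usage sketch (the buffer is an illustrative sink):

	// Sketch: hand a plain io.Writer to an API that insists on an io.WriteCloser.
	var buf bytes.Buffer
	var wc io.WriteCloser = langext.WriteNopCloser(&buf)
	_, _ = wc.Write([]byte("hello"))
	_ = wc.Close() // no-op, the underlying buffer is untouched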

View File

@@ -63,3 +63,51 @@ func PatchRemJson[JV string | []byte](rawjson JV, key string) (JV, error) {
return JV(newjson), nil
}
func MarshalJsonOrPanic(v any) string {
bin, err := json.Marshal(v)
if err != nil {
panic(err)
}
return string(bin)
}
func MarshalJsonOrDefault(v any, def string) string {
bin, err := json.Marshal(v)
if err != nil {
return def
}
return string(bin)
}
func MarshalJsonOrNil(v any) *string {
bin, err := json.Marshal(v)
if err != nil {
return nil
}
return Ptr(string(bin))
}
func MarshalJsonIndentOrPanic(v any, prefix, indent string) string {
bin, err := json.MarshalIndent(v, prefix, indent)
if err != nil {
panic(err)
}
return string(bin)
}
func MarshalJsonIndentOrDefault(v any, prefix, indent string, def string) string {
bin, err := json.MarshalIndent(v, prefix, indent)
if err != nil {
return def
}
return string(bin)
}
func MarshalJsonIndentOrNil(v any, prefix, indent string) *string {
bin, err := json.MarshalIndent(v, prefix, indent)
if err != nil {
return nil
}
return Ptr(string(bin))
}
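A hedged sketch of where these helpers fit (values are illustrative): in logging or formatting code paths where a marshal error should not abort the caller.

	// Sketch: marshal with a fixed fallback instead of error handling.
	payload := map[string]int{"a": 1}
	log.Println(langext.MarshalJsonOrDefault(payload, "{}"))   // {"a":1}
	log.Println(langext.MarshalJsonOrDefault(func() {}, "{}")) // {} (functions cannot be marshalled)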

View File

@@ -66,7 +66,7 @@ func CopyMap[K comparable, V any](a map[K]V) map[K]V {
func ForceMap[K comparable, V any](v map[K]V) map[K]V {
if v == nil {
return make(map[K]V, 0)
return make(map[K]V)
} else {
return v
}

19
langext/object.go Normal file
View File

@@ -0,0 +1,19 @@
package langext
import "encoding/json"
func DeepCopyByJson[T any](v T) (T, error) {
bin, err := json.Marshal(v)
if err != nil {
return *new(T), err
}
var result T
err = json.Unmarshal(bin, &result)
if err != nil {
return *new(T), err
}
return result, nil
}
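A hedged usage sketch (the struct is illustrative); note that the JSON round-trip only copies exported, JSON-marshallable fields:

	// Sketch: deep copy via encoding/json round-trip.
	type config struct {
		Hosts []string
		Tags  map[string]string
	}
	orig := config{Hosts: []string{"a", "b"}, Tags: map[string]string{"env": "dev"}}
	cpy, err := langext.DeepCopyByJson(orig)
	if err != nil {
		return err
	}
	cpy.Hosts[0] = "changed" // orig.Hosts[0] is still "a"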

View File

@@ -11,7 +11,7 @@ func (p PanicWrappedErr) Error() string {
return "A panic occured"
}
func (p PanicWrappedErr) ReoveredObj() any {
func (p PanicWrappedErr) RecoveredObj() any {
return p.panic
}

View File

@@ -22,11 +22,22 @@ func DblPtr[T any](v T) **T {
return &v_
}
func DblPtrIfNotNil[T any](v *T) **T {
if v == nil {
return nil
}
return &v
}
func DblPtrNil[T any]() **T {
var v *T = nil
return &v
}
func ArrPtr[T any](v ...T) *[]T {
return &v
}
func PtrInt32(v int32) *int32 {
return &v
}

View File

@@ -88,12 +88,15 @@ func StrRunePadRight(str string, pad string, padlen int) string {
func Indent(str string, pad string) string {
eonl := strings.HasSuffix(str, "\n")
if eonl {
str = str[0 : len(str)-1]
}
r := ""
for _, v := range strings.Split(str, "\n") {
r += pad + v + "\n"
}
if eonl {
if !eonl {
r = r[0 : len(r)-1]
}
@@ -115,3 +118,21 @@ func StrRepeat(val string, count int) string {
}
return r
}
func StrWrap(val string, linelen int, seperator string) string {
res := ""
for iPos := 0; ; {
next := min(iPos+linelen, len(val))
res += val[iPos:next]
iPos = next
if iPos >= len(val) {
break
}
res += seperator
}
return res
}

152
langext/string_test.go Normal file
View File

@@ -0,0 +1,152 @@
package langext
import "testing"
func TestStrLimitBehaviour(t *testing.T) {
val := "Hello, World!"
maxlen := 5
suffix := "..."
expected := "He..."
result := StrLimit(val, maxlen, suffix)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrSplitBehaviour1(t *testing.T) {
val := "Hello,World,,"
sep := ","
expected := []string{"Hello", "World"}
result := StrSplit(val, sep, false)
if len(result) != len(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrSplitBehaviour2(t *testing.T) {
val := "Hello,World,,"
sep := ","
expected := []string{"Hello", "World", "", ""}
result := StrSplit(val, sep, true)
if len(result) != len(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrPadRightBehaviour(t *testing.T) {
str := "Hello"
pad := "*"
padlen := 10
expected := "Hello*****"
result := StrPadRight(str, pad, padlen)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrPadLeftBehaviour(t *testing.T) {
str := "Hello"
pad := "*"
padlen := 10
expected := "*****Hello"
result := StrPadLeft(str, pad, padlen)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrRunePadLeftBehaviour(t *testing.T) {
str := "Hello"
pad := "*"
padlen := 10
expected := "*****Hello"
result := StrRunePadLeft(str, pad, padlen)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrRunePadRightBehaviour(t *testing.T) {
str := "Hello"
pad := "*"
padlen := 10
expected := "Hello*****"
result := StrRunePadRight(str, pad, padlen)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestIndentBehaviour1(t *testing.T) {
str := "Hello\nWorld"
pad := ".."
expected := "..Hello\n..World"
result := Indent(str, pad)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestIndentBehaviour2(t *testing.T) {
str := "Hello\nWorld\n"
pad := ".."
expected := "..Hello\n..World\n"
result := Indent(str, pad)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrRepeatBehaviour(t *testing.T) {
val := "Hello"
count := 3
expected := "HelloHelloHello"
result := StrRepeat(val, count)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrWrapBehaviour1(t *testing.T) {
val := "123456789"
linelen := 5
seperator := "\n"
expected := "12345\n6789"
result := StrWrap(val, linelen, seperator)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrWrapBehaviour2(t *testing.T) {
val := "1234567890"
linelen := 5
seperator := "\n"
expected := "12345\n67890"
result := StrWrap(val, linelen, seperator)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrWrapBehaviour3(t *testing.T) {
val := "****************"
linelen := 4
seperator := "\n"
expected := "****\n****\n****\n****"
result := StrWrap(val, linelen, seperator)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrWrapBehaviour4(t *testing.T) {
val := "*****************"
linelen := 4
seperator := "\n"
expected := "****\n****\n****\n****\n*"
result := StrWrap(val, linelen, seperator)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}

View File

@@ -1,6 +1,9 @@
package mathext
import "gogs.mikescher.com/BlackForestBytes/goext/langext"
import (
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
func Sum[T langext.NumberConstraint](v []T) T {
total := T(0)
@@ -41,3 +44,53 @@ func ArrMax[T langext.OrderedConstraint](v []T) T {
}
return r
}
func MustPercentile[T langext.NumberConstraint](rawdata []T, percentile float64) T {
v, err := Percentile(rawdata, percentile)
if err != nil {
panic(err)
}
return v
}
func Percentile[T langext.NumberConstraint](rawdata []T, percentile float64) (T, error) {
v, err := FloatPercentile(rawdata, percentile)
if err != nil {
return T(0), err
}
return T(v), nil
}
func FloatPercentile[T langext.NumberConstraint](rawdata []T, percentile float64) (float64, error) {
if len(rawdata) == 0 {
return 0, exerr.New(exerr.TypeAssert, "no data to calculate percentile").Any("percentile", percentile).Build()
}
if percentile < 0 || percentile > 100 {
return 0, exerr.New(exerr.TypeAssert, "percentile out of range").Any("percentile", percentile).Build()
}
data := langext.ArrCopy(rawdata)
langext.Sort(data)
idxFloat := float64(len(data)-1) * (percentile / float64(100))
idxInt := int(idxFloat)
// exact match on index
if idxFloat == float64(idxInt) {
return float64(data[idxInt]), nil
}
// linear interpolation
v1 := data[idxInt]
v2 := data[idxInt+1]
weight := idxFloat - float64(idxInt)
valFloat := (float64(v1) * (1 - weight)) + (float64(v2) * weight)
return valFloat, nil
}
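A short worked example of the interpolation step (numbers match TestFloatPercentileValueInArray below):

	// data = [1, 3, 5, 7, 9], percentile = 40
	//   idxFloat = (5-1) * 0.40 = 1.6  ->  idxInt = 1, weight = 0.6
	//   result   = 3*(1-0.6) + 5*0.6   = 4.2
	v, _ := mathext.FloatPercentile([]int{1, 3, 5, 7, 9}, 40) // v == 4.2
	p, _ := mathext.Percentile([]int{1, 3, 5, 7, 9}, 40)      // p == 4 (truncated back to int)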

238
mathext/statistics_test.go Normal file
View File

@@ -0,0 +1,238 @@
package mathext
import (
"math"
"testing"
)
func TestSumIntsHappyPath(t *testing.T) {
values := []int{1, 2, 3, 4, 5}
expected := 15
result := Sum(values)
if result != expected {
t.Errorf("Sum of %v; expected %v, got %v", values, expected, result)
}
}
func TestSumFloatsHappyPath(t *testing.T) {
values := []float64{1.1, 2.2, 3.3}
expected := 6.6
result := Sum(values)
if result != expected {
t.Errorf("Sum of %v; expected %v, got %v", values, expected, result)
}
}
func TestMeanOfInts(t *testing.T) {
values := []float64{1, 2, 3, 4, 5}
expected := 3.0
result := Mean(values)
if result != expected {
t.Errorf("Mean of %v; expected %v, got %v", values, expected, result)
}
}
func TestMedianOddNumberOfElements(t *testing.T) {
values := []float64{1, 2, 3, 4, 5}
expected := 3.0
result := Median(values)
if result != expected {
t.Errorf("Median of %v; expected %v, got %v", values, expected, result)
}
}
func TestMedianEvenNumberOfElements(t *testing.T) {
values := []float64{1, 2, 3, 4, 5, 6}
expected := 3.5
result := Median(values)
if result != expected {
t.Errorf("Median of %v; expected %v, got %v", values, expected, result)
}
}
func TestArrMinInts(t *testing.T) {
values := []int{5, 3, 9, 1, 4}
expected := 1
result := ArrMin(values)
if result != expected {
t.Errorf("ArrMin of %v; expected %v, got %v", values, expected, result)
}
}
func TestArrMaxInts(t *testing.T) {
values := []int{5, 3, 9, 1, 4}
expected := 9
result := ArrMax(values)
if result != expected {
t.Errorf("ArrMax of %v; expected %v, got %v", values, expected, result)
}
}
func TestPercentileValidInput(t *testing.T) {
values := []int{1, 2, 3, 4, 5}
percentile := 50.0
expected := 3
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileOutOfRange(t *testing.T) {
values := []int{1, 2, 3, 4, 5}
percentile := 150.0
_, err := Percentile(values, percentile)
if err == nil {
t.Errorf("Expected error for percentile %v out of range, got nil", percentile)
}
}
func TestPercentileValueInArray(t *testing.T) {
values := []int{1, 3, 5, 7, 9}
percentile := 40.0
expected := 4
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileValueInArray(t *testing.T) {
values := []int{1, 3, 5, 7, 9}
percentile := 40.0
expected := 4.2
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileInterpolation(t *testing.T) {
values := []float64{1.0, 2.0, 3.0, 4.0, 5.0}
percentile := 25.0
expected := 2.0
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileSingleValue(t *testing.T) {
values := []int{10}
percentile := 50.0
expected := 10
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileExactlyBetweenTwoValues(t *testing.T) {
values := []float64{1, 2, 3, 4, 5}
percentile := 62.5 // Exactly between 3 and 4
expected := 3.5
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileTwoThirdsBetweenTwoValues(t *testing.T) {
values := []float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
percentile := 66.666666666666
expected := 6.666666666666667 // Since 2/3 of the way between 6 and 7 is 6.666...
result, err := Percentile(values, percentile)
if err != nil || math.Abs(result-expected) > 1e-9 {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileBetweenTwoValues1(t *testing.T) {
values := []float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
percentile := 11.0
expected := 1.1
result, err := Percentile(values, percentile)
if err != nil || math.Abs(result-expected) > 1e-9 {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileBetweenTwoValues2(t *testing.T) {
values := []float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
percentile := 9.0
expected := 0.9
result, err := Percentile(values, percentile)
if err != nil || math.Abs(result-expected) > 1e-9 {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileUnsortedInput(t *testing.T) {
values := []float64{5, 1, 4, 2, 3} // Unsorted input
percentile := 50.0
expected := 3.0
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileUnsortedInputLowPercentile(t *testing.T) {
values := []float64{10, 6, 7, 3, 2, 9, 8, 1, 4, 5} // Unsorted input
percentile := 10.0
expected := 1.9 // Expecting interpolation between 1 and 2
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileUnsortedInputHighPercentile(t *testing.T) {
values := []float64{10, 6, 7, 3, 2, 9, 8, 1, 4, 5} // Unsorted input
percentile := 90.0
expected := 9.1 // Expecting interpolation between 9 and 10
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileExactValueFromInput(t *testing.T) {
values := []float64{1.5, 2.5, 3.5, 4.5, 5.5}
percentile := 50.0 // Exact value from input array should be 3.5
expected := 3.5
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileInterpolatedValue(t *testing.T) {
values := []float64{1.0, 2.0, 3.0, 4.0, 5.0}
percentile := 87.5 // Interpolated value between 4.0 and 5.0
expected := 4.5
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileUnsortedInputExactValue(t *testing.T) {
values := []float64{5.5, 1.5, 4.5, 2.5, 3.5} // Unsorted input
percentile := 50.0
expected := 3.5
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileUnsortedInputInterpolatedValue(t *testing.T) {
values := []float64{10.5, 6.5, 7.5, 3.5, 2.5, 9.5, 8.5, 1.5, 4.5, 5.5}
percentile := 80.0 // Interpolated value between 8.5 and 9.5
expected := 8.7
result, err := FloatPercentile(values, percentile)
if err != nil || math.Abs(result-expected) > 1e-9 {
t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}

View File

@@ -5,6 +5,7 @@ import (
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsontype"
"go.mongodb.org/mongo-driver/bson/primitive"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rfctime"
"reflect"
@@ -34,6 +35,15 @@ func CreateGoExtBsonRegistry() *bsoncodec.Registry {
rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.SecondsF64(0)), rfctime.SecondsF64(0))
rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(rfctime.SecondsF64(0))), rfctime.SecondsF64(0))
rb.RegisterTypeDecoder(reflect.TypeOf(exerr.ErrorCategory{}), exerr.ErrorCategory{})
rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(exerr.ErrorCategory{})), exerr.ErrorCategory{})
rb.RegisterTypeDecoder(reflect.TypeOf(exerr.ErrorSeverity{}), exerr.ErrorSeverity{})
rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(exerr.ErrorSeverity{})), exerr.ErrorSeverity{})
rb.RegisterTypeDecoder(reflect.TypeOf(exerr.ErrorType{}), exerr.ErrorType{})
rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(exerr.ErrorType{})), exerr.ErrorType{})
bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb)
bsoncodec.DefaultValueDecoders{}.RegisterDefaultDecoders(rb)

View File

@@ -1,13 +1,14 @@
package pagination
import (
"context"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
)
type MongoFilter interface {
FilterQuery() mongo.Pipeline
Sort() bson.D
FilterQuery(ctx context.Context) mongo.Pipeline
Sort(ctx context.Context) bson.D
}
type dynamicFilter struct {
@@ -15,11 +16,11 @@ type dynamicFilter struct {
sort bson.D
}
func (d dynamicFilter) FilterQuery() mongo.Pipeline {
func (d dynamicFilter) FilterQuery(ctx context.Context) mongo.Pipeline {
return d.pipeline
}
func (d dynamicFilter) Sort() bson.D {
func (d dynamicFilter) Sort(ctx context.Context) bson.D {
return d.sort
}

View File

@@ -2,29 +2,44 @@ package reflectext
import (
"encoding/json"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
)
type ConvertStructToMapOpt struct {
KeepJsonMarshalTypes bool
MaxDepth *int
}
func ConvertStructToMap(v any, opts ...ConvertStructToMapOpt) any {
func ConvertStructToMap(v any, opts ...ConvertStructToMapOpt) map[string]any {
opt := ConvertStructToMapOpt{}
if len(opts) > 0 {
opt = opts[0]
}
return reflectToMap(reflect.ValueOf(v), opt)
res := reflectToMap(reflect.ValueOf(v), 1, opt)
if v, ok := res.(map[string]any); ok {
return v
} else if langext.IsNil(res) {
return nil
} else {
panic("not an object")
}
}
func reflectToMap(fv reflect.Value, opt ConvertStructToMapOpt) any {
func reflectToMap(fv reflect.Value, depth int, opt ConvertStructToMapOpt) any {
if opt.MaxDepth != nil && depth > *opt.MaxDepth {
return fv.Interface()
}
if fv.Kind() == reflect.Ptr {
if fv.IsNil() {
return nil
} else {
return reflectToMap(fv.Elem(), opt)
return reflectToMap(fv.Elem(), depth, opt)
}
}
@@ -41,7 +56,7 @@ func reflectToMap(fv reflect.Value, opt ConvertStructToMapOpt) any {
arrlen := fv.Len()
arr := make([]any, arrlen)
for i := 0; i < arrlen; i++ {
arr[i] = reflectToMap(fv.Index(i), opt)
arr[i] = reflectToMap(fv.Index(i), depth+1, opt)
}
return arr
@@ -52,7 +67,7 @@ func reflectToMap(fv reflect.Value, opt ConvertStructToMapOpt) any {
arrlen := fv.Len()
arr := make([]any, arrlen)
for i := 0; i < arrlen; i++ {
arr[i] = reflectToMap(fv.Index(i), opt)
arr[i] = reflectToMap(fv.Index(i), depth+1, opt)
}
return arr
@@ -75,7 +90,7 @@ func reflectToMap(fv reflect.Value, opt ConvertStructToMapOpt) any {
for i := 0; i < fv.NumField(); i++ {
if fv.Type().Field(i).IsExported() {
res[fv.Type().Field(i).Name] = reflectToMap(fv.Field(i), opt)
res[fv.Type().Field(i).Name] = reflectToMap(fv.Field(i), depth+1, opt)
}
}

View File

@@ -0,0 +1,42 @@
package reflectext
import (
"fmt"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"testing"
"time"
)
func TestConvertStructToMap(t *testing.T) {
type tst struct {
FieldA int
FieldB string
FieldC time.Time
FieldD []float64
FieldE1 *int
FieldE2 **int
FieldE3 *int
FieldE4 **int
FieldE5 *int
FieldE6 **int
}
value := tst{
FieldA: 123,
FieldB: "hello",
FieldC: time.Date(2020, 05, 12, 8, 30, 0, 0, time.UTC),
FieldD: []float64{1, 2, 3, 4, 5, 6, 7},
FieldE1: nil,
FieldE2: nil,
FieldE3: langext.Ptr(12),
FieldE4: langext.DblPtr(12),
FieldE5: nil,
FieldE6: langext.DblPtrNil[int](),
}
valueOut := ConvertStructToMap(value, ConvertStructToMapOpt{KeepJsonMarshalTypes: true})
fmt.Printf("%+v\n", valueOut)
}

View File

@@ -47,9 +47,3 @@ func TestGetMapField(t *testing.T) {
tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test")), "12 true")
tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test2")), "0 false")
}
func main2() {
}
func main() {
}

185
reflectext/structAccess.go Normal file
View File

@@ -0,0 +1,185 @@
package reflectext
import (
"errors"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"strconv"
"strings"
)
var ErrAccessStructInvalidFieldType = errors.New("invalid field type")
var ErrAccessStructFieldInPathWasNil = errors.New("a field in the path was nil")
var ErrAccessStructInvalidArrayIndex = errors.New("invalid array index")
var ErrAccessStructInvalidMapKey = errors.New("invalid map key")
var ErrAccessStructArrayAccess = errors.New("trying to access array")
var ErrAccessStructMapAccess = errors.New("trying to access map")
var ErrAccessStructMissingField = errors.New("missing field")
type AccessStructOpt struct {
ReturnNilOnMissingFields bool // return nil (instead of error) when a field in the path is missing (aka the supplied path is wrong)
ReturnNilOnNilPtrFields bool // return nil (instead of error) when a field in the path is nil
ReturnNilOnWrongFinalFieldType bool // return nil (instead of error) when the (final) field is not of the requested generic type
ReturnNilOnWrongIntermedFieldType bool // return nil (instead of error) when the intermediate field has an invalid type
ReturnNilOnInvalidArrayIndizes bool // return nil (instead of error) when trying to access an array with an invalid index (not a number or out of range)
ReturnNilOnMissingMapKeys bool // return nil (instead of error) when trying to access a map with a missing key
UsedTagForKeys *string // Use this tag for key names in the struct (instead of the StructField.Name)
PreventArrayAccess bool // do not access array indices - return an error instead
PreventMapAccess bool // do not access maps - return an error instead
}
func AccessJSONStruct[TResult any](v any, path string) (TResult, error) {
return AccessStructByStringPath[TResult](v, path, AccessStructOpt{UsedTagForKeys: langext.Ptr("json")})
}
func AccessStruct[TResult any](v any, path string) (TResult, error) {
return AccessStructByStringPath[TResult](v, path, AccessStructOpt{})
}
func AccessStructByArrayPath[TResult any](v any, path []string, opts ...AccessStructOpt) (TResult, error) {
opt := AccessStructOpt{}
if len(opts) > 0 {
opt = opts[0]
}
resultVal, err := accessStructByPath(reflect.ValueOf(v), path, opt)
if err != nil {
return *new(TResult), err
}
if resultValCast, ok := resultVal.(TResult); ok {
return resultValCast, nil
} else if opt.ReturnNilOnWrongFinalFieldType {
return *new(TResult), nil
} else {
return *new(TResult), ErrAccessStructInvalidFieldType
}
}
func AccessStructByStringPath[TResult any](v any, path string, opts ...AccessStructOpt) (TResult, error) {
opt := AccessStructOpt{}
if len(opts) > 0 {
opt = opts[0]
}
arrpath := strings.Split(path, ".")
resultVal, err := accessStructByPath(reflect.ValueOf(v), arrpath, opt)
if err != nil {
return *new(TResult), err
}
if resultValCast, ok := resultVal.(TResult); ok {
return resultValCast, nil
} else if opt.ReturnNilOnWrongFinalFieldType {
return *new(TResult), nil
} else {
return *new(TResult), ErrAccessStructInvalidFieldType
}
}
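A hedged usage sketch (struct shape and path are illustrative; the tests further down exercise the same API in more detail):

	// Sketch: read a nested field by its json tag names.
	type sub struct {
		Name string `json:"name"`
	}
	type outer struct {
		Sub *sub `json:"sub"`
	}
	val := outer{Sub: &sub{Name: "Jane"}}
	name, err := reflectext.AccessJSONStruct[string](val, "sub.name")
	// name == "Jane", err == nil; AccessStructByStringPath + AccessStructOpt gives finer control,
	// e.g. ReturnNilOnMissingFields to tolerate wrong paths.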
func accessStructByPath(val reflect.Value, path []string, opt AccessStructOpt) (any, error) {
if len(path) == 0 {
return val.Interface(), nil
}
currPath := path[0]
if val.Kind() == reflect.Ptr {
if val.IsNil() {
if opt.ReturnNilOnNilPtrFields {
return nil, nil
} else {
return nil, ErrAccessStructFieldInPathWasNil
}
}
return accessStructByPath(val.Elem(), path, opt)
}
if val.Kind() == reflect.Array || val.Kind() == reflect.Slice {
if opt.PreventArrayAccess {
return nil, ErrAccessStructArrayAccess
}
if val.IsNil() {
if opt.ReturnNilOnNilPtrFields {
return nil, nil
} else {
return nil, ErrAccessStructFieldInPathWasNil
}
}
arrIdx, err := strconv.ParseInt(currPath, 10, 64)
if err != nil {
if opt.ReturnNilOnInvalidArrayIndizes {
return nil, nil
} else {
return nil, ErrAccessStructInvalidArrayIndex
}
}
if arrIdx < 0 || int(arrIdx) >= val.Len() {
if opt.ReturnNilOnInvalidArrayIndizes {
return nil, nil
} else {
return nil, ErrAccessStructInvalidArrayIndex
}
}
return accessStructByPath(val.Index(int(arrIdx)), path[1:], opt)
}
if val.Kind() == reflect.Map {
if opt.PreventMapAccess {
return nil, ErrAccessStructMapAccess
}
if val.IsNil() {
if opt.ReturnNilOnNilPtrFields {
return nil, nil
} else {
return nil, ErrAccessStructFieldInPathWasNil
}
}
mapval := val.MapIndex(reflect.ValueOf(currPath))
if !mapval.IsValid() || mapval.IsZero() {
if opt.ReturnNilOnMissingMapKeys {
return nil, nil
} else {
return nil, ErrAccessStructInvalidMapKey
}
}
return accessStructByPath(mapval, path[1:], opt)
}
if val.Kind() == reflect.Struct {
if opt.UsedTagForKeys != nil {
for i := 0; i < val.NumField(); i++ {
if val.Type().Field(i).Tag.Get(*opt.UsedTagForKeys) == currPath {
return accessStructByPath(val.Field(i), path[1:], opt)
}
}
if opt.ReturnNilOnMissingFields {
return nil, nil
} else {
return nil, ErrAccessStructMissingField
}
} else {
for i := 0; i < val.NumField(); i++ {
if val.Type().Field(i).Name == currPath {
return accessStructByPath(val.Field(i), path[1:], opt)
}
}
if opt.ReturnNilOnMissingFields {
return nil, nil
} else {
return nil, ErrAccessStructMissingField
}
}
}
if opt.ReturnNilOnWrongIntermedFieldType {
return nil, nil
} else {
return nil, ErrAccessStructMissingField
}
}

View File

@@ -0,0 +1,259 @@
package reflectext
import "testing"
type TestStruct struct {
Name string `json:"name"`
Age int `json:"age"`
}
func TestAccessStructByArrayPath_HappyPath(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
result, err := AccessStructByArrayPath[string](testStruct, []string{"Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "John" {
t.Errorf("Expected 'John', got '%s'", result)
}
}
func TestAccessStructByArrayPath_InvalidField(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[string](testStruct, []string{"Invalid"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
func TestAccessStructByStringPath_HappyPath(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
result, err := AccessStructByStringPath[string](testStruct, "Name")
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "John" {
t.Errorf("Expected 'John', got '%s'", result)
}
}
func TestAccessStructByStringPath_InvalidField(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByStringPath[string](testStruct, "Invalid")
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type RecursiveStruct struct {
Name string
Sub *RecursiveStruct
SubSlice []RecursiveStruct
}
func TestAccessStructByArrayPath_RecursiveStruct(t *testing.T) {
testStruct := RecursiveStruct{Name: "John", Sub: &RecursiveStruct{Name: "Jane"}}
result, err := AccessStructByArrayPath[string](*testStruct.Sub, []string{"Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
func TestAccessStructByArrayPath_RecursiveStructSlice(t *testing.T) {
testStruct := RecursiveStruct{Name: "John", SubSlice: []RecursiveStruct{{Name: "Jane"}}}
result, err := AccessStructByArrayPath[string](testStruct.SubSlice[0], []string{"Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
func TestAccessStructByArrayPath_WrongType(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[int](testStruct, []string{"Name"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
func TestAccessStructByArrayPath_InvalidPath(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[string](testStruct, []string{"Name", "Invalid"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type NestedStruct struct {
Name string
Sub *TestStruct
}
func TestAccessStructByStringPath_NestedStruct(t *testing.T) {
testStruct := NestedStruct{Name: "John", Sub: &TestStruct{Name: "Jane", Age: 30}}
result, err := AccessStructByStringPath[string](testStruct, "Sub.Name")
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
type DeepNestedStruct struct {
Name string
Sub *NestedStruct
}
func TestAccessStructByStringPath_DeepNestedStruct(t *testing.T) {
testStruct := DeepNestedStruct{Name: "John", Sub: &NestedStruct{Name: "Jane", Sub: &TestStruct{Name: "Doe", Age: 30}}}
result, err := AccessStructByStringPath[string](testStruct, "Sub.Sub.Name")
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Doe" {
t.Errorf("Expected 'Doe', got '%s'", result)
}
}
type MapStruct struct {
Name string
Age int
}
type TestStructWithMap struct {
MapField map[string]MapStruct
}
func TestAccessStructByArrayPath_MapField(t *testing.T) {
testStruct := TestStructWithMap{
MapField: map[string]MapStruct{
"key": {Name: "John", Age: 30},
},
}
result, err := AccessStructByArrayPath[string](testStruct, []string{"MapField", "key", "Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "John" {
t.Errorf("Expected 'John', got '%s'", result)
}
}
func TestAccessStructByArrayPath_InvalidMapKey(t *testing.T) {
testStruct := TestStructWithMap{
MapField: map[string]MapStruct{
"key": {Name: "John", Age: 30},
},
}
_, err := AccessStructByArrayPath[string](testStruct, []string{"MapField", "invalid", "Name"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type ArrayStruct struct {
Name string
Arr []TestStruct
}
func TestAccessStructByArrayPath_ArrayField(t *testing.T) {
testStruct := ArrayStruct{
Name: "John",
Arr: []TestStruct{{Name: "Jane", Age: 30}},
}
result, err := AccessStructByArrayPath[string](testStruct, []string{"Arr", "0", "Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
func TestAccessStructByArrayPath_InvalidArrayIndex(t *testing.T) {
testStruct := ArrayStruct{
Name: "John",
Arr: []TestStruct{{Name: "Jane", Age: 30}},
}
_, err := AccessStructByArrayPath[string](testStruct, []string{"Arr", "1", "Name"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type FunctionStruct struct {
Name string
Func func() string
}
func TestAccessStructByArrayPath_FunctionField(t *testing.T) {
testStruct := FunctionStruct{Name: "John", Func: func() string { return "Hello" }}
_, err := AccessStructByArrayPath[string](testStruct, []string{"Func"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
func TestAccessStructByArrayPath_NonExistentPath(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[string](testStruct, []string{"NonExistent"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type NestedStructWithTag struct {
Name string `json:"name"`
Sub *TestStruct `json:"sub"`
}
func TestAccessStructByArrayPath_UsedTagForKeys(t *testing.T) {
testStruct := NestedStructWithTag{Name: "John", Sub: &TestStruct{Name: "Jane", Age: 30}}
tag := "json"
result, err := AccessStructByArrayPath[string](testStruct, []string{"sub", "name"}, AccessStructOpt{UsedTagForKeys: &tag})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
func TestAccessStructByArrayPath_UsedTagForKeysInvalid(t *testing.T) {
testStruct := NestedStructWithTag{Name: "John", Sub: &TestStruct{Name: "Jane", Age: 30}}
tag := "json"
_, err := AccessStructByArrayPath[string](testStruct, []string{"sub", "invalid"}, AccessStructOpt{UsedTagForKeys: &tag})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type DifferentTypeStruct struct {
Name string
Age int
}
func TestAccessStructByArrayPath_DifferentType(t *testing.T) {
testStruct := DifferentTypeStruct{Name: "John", Age: 30}
result, err := AccessStructByArrayPath[any](testStruct, []string{"Age"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != 30 {
t.Errorf("Expected '30', got '%v'", result)
}
}
func TestAccessStructByArrayPath_DifferentTypeInvalid(t *testing.T) {
testStruct := DifferentTypeStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[any](testStruct, []string{"Invalid"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}

View File

@@ -8,6 +8,7 @@ import (
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"time"
)
@@ -245,6 +246,13 @@ func NewRFC3339(t time.Time) RFC3339Time {
return RFC3339Time(t)
}
func NewRFC3339Ptr(t *time.Time) *RFC3339Time {
if t == nil {
return nil
}
return langext.Ptr(RFC3339Time(*t))
}
func NowRFC3339() RFC3339Time {
return RFC3339Time(time.Now())
}

View File

@@ -8,6 +8,7 @@ import (
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"time"
)
@@ -245,6 +246,13 @@ func NewRFC3339Nano(t time.Time) RFC3339NanoTime {
return RFC3339NanoTime(t)
}
func NewRFC3339NanoPtr(t *time.Time) *RFC3339NanoTime {
if t == nil {
return nil
}
return langext.Ptr(RFC3339NanoTime(*t))
}
func NowRFC3339Nano() RFC3339NanoTime {
return RFC3339NanoTime(time.Now())
}

View File

@@ -8,6 +8,7 @@ import (
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"strconv"
"time"
@@ -239,6 +240,13 @@ func NewUnix(t time.Time) UnixTime {
return UnixTime(t)
}
func NewUnixPtr(t *time.Time) *UnixTime {
if t == nil {
return nil
}
return langext.Ptr(UnixTime(*t))
}
func NowUnix() UnixTime {
return UnixTime(time.Now())
}

View File

@@ -8,6 +8,7 @@ import (
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"strconv"
"time"
@@ -239,6 +240,13 @@ func NewUnixMilli(t time.Time) UnixMilliTime {
return UnixMilliTime(t)
}
func NewUnixMilliPtr(t *time.Time) *UnixMilliTime {
if t == nil {
return nil
}
return langext.Ptr(UnixMilliTime(*t))
}
func NowUnixMilli() UnixMilliTime {
return UnixMilliTime(time.Now())
}

View File

@@ -8,6 +8,7 @@ import (
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"strconv"
"time"
@@ -239,6 +240,13 @@ func NewUnixNano(t time.Time) UnixNanoTime {
return UnixNanoTime(t)
}
func NewUnixNanoPtr(t *time.Time) *UnixNanoTime {
if t == nil {
return nil
}
return langext.Ptr(UnixNanoTime(*t))
}
func NowUnixNano() UnixNanoTime {
return UnixNanoTime(time.Now())
}

View File

@@ -7,6 +7,7 @@ import (
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"sync"
"time"
)
type DB interface {
@@ -57,89 +58,121 @@ func (db *database) AddListener(listener Listener) {
func (db *database) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Result, error) {
origsql := sqlstr
t0 := time.Now()
preMeta := PreExecMeta{Context: ctx, TransactionConstructorContext: nil}
for _, v := range db.lstr {
err := v.PreExec(ctx, nil, &sqlstr, &prep)
err := v.PreExec(ctx, nil, &sqlstr, &prep, preMeta)
if err != nil {
return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
}
t1 := time.Now()
res, err := db.db.NamedExecContext(ctx, sqlstr, prep)
postMeta := PostExecMeta{Context: ctx, TransactionConstructorContext: nil, Init: t0, Start: t1, End: time.Now()}
for _, v := range db.lstr {
v.PostExec(nil, origsql, sqlstr, prep)
v.PostExec(nil, origsql, sqlstr, prep, err, postMeta)
}
if err != nil {
return nil, exerr.Wrap(err, "Failed to [exec] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
return res, nil
}
func (db *database) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Rows, error) {
origsql := sqlstr
t0 := time.Now()
preMeta := PreQueryMeta{Context: ctx, TransactionConstructorContext: nil}
for _, v := range db.lstr {
err := v.PreQuery(ctx, nil, &sqlstr, &prep)
err := v.PreQuery(ctx, nil, &sqlstr, &prep, preMeta)
if err != nil {
return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
}
t1 := time.Now()
rows, err := sqlx.NamedQueryContext(ctx, db.db, sqlstr, prep)
postMeta := PostQueryMeta{Context: ctx, TransactionConstructorContext: nil, Init: t0, Start: t1, End: time.Now()}
for _, v := range db.lstr {
v.PostQuery(nil, origsql, sqlstr, prep)
v.PostQuery(nil, origsql, sqlstr, prep, err, postMeta)
}
if err != nil {
return nil, exerr.Wrap(err, "Failed to [query] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
return rows, nil
}
func (db *database) Ping(ctx context.Context) error {
t0 := time.Now()
preMeta := PrePingMeta{Context: ctx}
for _, v := range db.lstr {
err := v.PrePing(ctx)
err := v.PrePing(ctx, preMeta)
if err != nil {
return err
}
}
t1 := time.Now()
err := db.db.PingContext(ctx)
postMeta := PostPingMeta{Context: ctx, Init: t0, Start: t1, End: time.Now()}
for _, v := range db.lstr {
v.PostPing(err)
v.PostPing(err, postMeta)
}
if err != nil {
return exerr.Wrap(err, "Failed to [ping] sql database").Build()
}
return nil
}
func (db *database) BeginTransaction(ctx context.Context, iso sql.IsolationLevel) (Tx, error) {
t0 := time.Now()
db.lock.Lock()
txid := db.txctr
db.txctr += 1 // with overflow !
db.lock.Unlock()
preMeta := PreTxBeginMeta{Context: ctx}
for _, v := range db.lstr {
err := v.PreTxBegin(ctx, txid)
err := v.PreTxBegin(ctx, txid, preMeta)
if err != nil {
return nil, err
}
}
t1 := time.Now()
xtx, err := db.db.BeginTxx(ctx, &sql.TxOptions{Isolation: iso})
postMeta := PostTxBeginMeta{Context: ctx, Init: t0, Start: t1, End: time.Now()}
for _, v := range db.lstr {
v.PostTxBegin(txid, err, postMeta)
}
if err != nil {
return nil, exerr.Wrap(err, "Failed to start sql transaction").Build()
}
for _, v := range db.lstr {
v.PostTxBegin(txid, err)
}
return NewTransaction(xtx, txid, db), nil
return newTransaction(ctx, xtx, txid, db), nil
}
func (db *database) Exit() error {

View File

@@ -1,188 +1,266 @@
package sq
import "context"
import (
"context"
"time"
)
type PrePingMeta struct {
Context context.Context
}
type PreTxBeginMeta struct {
Context context.Context
ConstructorContext context.Context
}
type PreTxCommitMeta struct {
ConstructorContext context.Context
}
type PreTxRollbackMeta struct {
ConstructorContext context.Context
}
type PreQueryMeta struct {
Context context.Context
TransactionConstructorContext context.Context
}
type PreExecMeta struct {
Context context.Context
TransactionConstructorContext context.Context
}
type PostPingMeta struct {
Context context.Context
Init time.Time
Start time.Time
End time.Time
}
type PostTxBeginMeta struct {
Context context.Context
Init time.Time
Start time.Time
End time.Time
}
type PostTxCommitMeta struct {
ConstructorContext context.Context
Init time.Time
Start time.Time
End time.Time
ExecCounter int
QueryCounter int
}
type PostTxRollbackMeta struct {
ConstructorContext context.Context
Init time.Time
Start time.Time
End time.Time
ExecCounter int
QueryCounter int
}
type PostQueryMeta struct {
Context context.Context
TransactionConstructorContext context.Context
Init time.Time
Start time.Time
End time.Time
}
type PostExecMeta struct {
Context context.Context
TransactionConstructorContext context.Context
Init time.Time
Start time.Time
End time.Time
}
type Listener interface {
PrePing(ctx context.Context) error
PreTxBegin(ctx context.Context, txid uint16) error
PreTxCommit(txid uint16) error
PreTxRollback(txid uint16) error
PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP) error
PreExec(ctx context.Context, txID *uint16, sql *string, params *PP) error
PrePing(ctx context.Context, meta PrePingMeta) error
PreTxBegin(ctx context.Context, txid uint16, meta PreTxBeginMeta) error
PreTxCommit(txid uint16, meta PreTxCommitMeta) error
PreTxRollback(txid uint16, meta PreTxRollbackMeta) error
PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error
PreExec(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error
PostPing(result error)
PostTxBegin(txid uint16, result error)
PostTxCommit(txid uint16, result error)
PostTxRollback(txid uint16, result error)
PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP)
PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP)
PostPing(result error, meta PostPingMeta)
PostTxBegin(txid uint16, result error, meta PostTxBeginMeta)
PostTxCommit(txid uint16, result error, meta PostTxCommitMeta)
PostTxRollback(txid uint16, result error, meta PostTxRollbackMeta)
PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta)
PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta)
}
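A hedged sketch of what the new meta structs enable (listener choice and log format are illustrative; `db` is assumed to be an sq.DB): the Init/Start/End timestamps let a listener measure pre-listener overhead and actual statement duration.

	// Sketch: log the duration of every query using the new PostQueryMeta timestamps.
	timingLog := sq.NewPostQueryListener(func(txID *uint16, sqlOriginal string, sqlReal string, params sq.PP, result error, meta sq.PostQueryMeta) {
		log.Printf("query took %s (pre-listeners: %s, err: %v)", meta.End.Sub(meta.Start), meta.Start.Sub(meta.Init), result)
	})
	db.AddListener(timingLog)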
type genListener struct {
prePing func(ctx context.Context) error
preTxBegin func(ctx context.Context, txid uint16) error
preTxCommit func(txid uint16) error
preTxRollback func(txid uint16) error
preQuery func(ctx context.Context, txID *uint16, sql *string, params *PP) error
preExec func(ctx context.Context, txID *uint16, sql *string, params *PP) error
postPing func(result error)
postTxBegin func(txid uint16, result error)
postTxCommit func(txid uint16, result error)
postTxRollback func(txid uint16, result error)
postQuery func(txID *uint16, sqlOriginal string, sqlReal string, params PP)
postExec func(txID *uint16, sqlOriginal string, sqlReal string, params PP)
prePing func(ctx context.Context, meta PrePingMeta) error
preTxBegin func(ctx context.Context, txid uint16, meta PreTxBeginMeta) error
preTxCommit func(txid uint16, meta PreTxCommitMeta) error
preTxRollback func(txid uint16, meta PreTxRollbackMeta) error
preQuery func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error
preExec func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error
postPing func(result error, meta PostPingMeta)
postTxBegin func(txid uint16, result error, meta PostTxBeginMeta)
postTxCommit func(txid uint16, result error, meta PostTxCommitMeta)
postTxRollback func(txid uint16, result error, meta PostTxRollbackMeta)
postQuery func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta)
postExec func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta)
}
func (g genListener) PrePing(ctx context.Context) error {
func (g genListener) PrePing(ctx context.Context, meta PrePingMeta) error {
if g.prePing != nil {
return g.prePing(ctx)
return g.prePing(ctx, meta)
} else {
return nil
}
}
func (g genListener) PreTxBegin(ctx context.Context, txid uint16) error {
func (g genListener) PreTxBegin(ctx context.Context, txid uint16, meta PreTxBeginMeta) error {
if g.preTxBegin != nil {
return g.preTxBegin(ctx, txid)
return g.preTxBegin(ctx, txid, meta)
} else {
return nil
}
}
func (g genListener) PreTxCommit(txid uint16) error {
func (g genListener) PreTxCommit(txid uint16, meta PreTxCommitMeta) error {
if g.preTxCommit != nil {
return g.preTxCommit(txid)
return g.preTxCommit(txid, meta)
} else {
return nil
}
}
func (g genListener) PreTxRollback(txid uint16) error {
func (g genListener) PreTxRollback(txid uint16, meta PreTxRollbackMeta) error {
if g.preTxRollback != nil {
return g.preTxRollback(txid)
return g.preTxRollback(txid, meta)
} else {
return nil
}
}
func (g genListener) PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP) error {
func (g genListener) PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error {
if g.preQuery != nil {
return g.preQuery(ctx, txID, sql, params)
return g.preQuery(ctx, txID, sql, params, meta)
} else {
return nil
}
}
func (g genListener) PreExec(ctx context.Context, txID *uint16, sql *string, params *PP) error {
func (g genListener) PreExec(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error {
if g.preExec != nil {
return g.preExec(ctx, txID, sql, params)
return g.preExec(ctx, txID, sql, params, meta)
} else {
return nil
}
}
func (g genListener) PostPing(result error) {
func (g genListener) PostPing(result error, meta PostPingMeta) {
if g.postPing != nil {
g.postPing(result)
g.postPing(result, meta)
}
}
func (g genListener) PostTxBegin(txid uint16, result error) {
func (g genListener) PostTxBegin(txid uint16, result error, meta PostTxBeginMeta) {
if g.postTxBegin != nil {
g.postTxBegin(txid, result)
g.postTxBegin(txid, result, meta)
}
}
func (g genListener) PostTxCommit(txid uint16, result error) {
func (g genListener) PostTxCommit(txid uint16, result error, meta PostTxCommitMeta) {
if g.postTxCommit != nil {
g.postTxCommit(txid, result)
g.postTxCommit(txid, result, meta)
}
}
func (g genListener) PostTxRollback(txid uint16, result error) {
func (g genListener) PostTxRollback(txid uint16, result error, meta PostTxRollbackMeta) {
if g.postTxRollback != nil {
g.postTxRollback(txid, result)
g.postTxRollback(txid, result, meta)
}
}
func (g genListener) PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP) {
func (g genListener) PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta) {
if g.postQuery != nil {
g.postQuery(txID, sqlOriginal, sqlReal, params)
g.postQuery(txID, sqlOriginal, sqlReal, params, result, meta)
}
}
func (g genListener) PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP) {
func (g genListener) PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta) {
if g.postExec != nil {
g.postExec(txID, sqlOriginal, sqlReal, params)
g.postExec(txID, sqlOriginal, sqlReal, params, result, meta)
}
}
func NewPrePingListener(f func(ctx context.Context) error) Listener {
func NewPrePingListener(f func(ctx context.Context, meta PrePingMeta) error) Listener {
return genListener{prePing: f}
}
func NewPreTxBeginListener(f func(ctx context.Context, txid uint16) error) Listener {
func NewPreTxBeginListener(f func(ctx context.Context, txid uint16, meta PreTxBeginMeta) error) Listener {
return genListener{preTxBegin: f}
}
func NewPreTxCommitListener(f func(txid uint16) error) Listener {
func NewPreTxCommitListener(f func(txid uint16, meta PreTxCommitMeta) error) Listener {
return genListener{preTxCommit: f}
}
func NewPreTxRollbackListener(f func(txid uint16) error) Listener {
func NewPreTxRollbackListener(f func(txid uint16, meta PreTxRollbackMeta) error) Listener {
return genListener{preTxRollback: f}
}
func NewPreQueryListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP) error) Listener {
func NewPreQueryListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error) Listener {
return genListener{preQuery: f}
}
func NewPreExecListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP) error) Listener {
func NewPreExecListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error) Listener {
return genListener{preExec: f}
}
func NewPreListener(f func(ctx context.Context, cmdtype string, txID *uint16, sql *string, params *PP) error) Listener {
return genListener{
preExec: func(ctx context.Context, txID *uint16, sql *string, params *PP) error {
preExec: func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error {
return f(ctx, "EXEC", txID, sql, params)
},
preQuery: func(ctx context.Context, txID *uint16, sql *string, params *PP) error {
preQuery: func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error {
return f(ctx, "QUERY", txID, sql, params)
},
}
}
func NewPostPingListener(f func(result error)) Listener {
func NewPostPingListener(f func(result error, meta PostPingMeta)) Listener {
return genListener{postPing: f}
}
func NewPostTxBeginListener(f func(txid uint16, result error)) Listener {
func NewPostTxBeginListener(f func(txid uint16, result error, meta PostTxBeginMeta)) Listener {
return genListener{postTxBegin: f}
}
func NewPostTxCommitListener(f func(txid uint16, result error)) Listener {
func NewPostTxCommitListener(f func(txid uint16, result error, meta PostTxCommitMeta)) Listener {
return genListener{postTxCommit: f}
}
func NewPostTxRollbackListener(f func(txid uint16, result error)) Listener {
func NewPostTxRollbackListener(f func(txid uint16, result error, meta PostTxRollbackMeta)) Listener {
return genListener{postTxRollback: f}
}
func NewPostQueryListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener {
func NewPostQueryListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta)) Listener {
return genListener{postQuery: f}
}
func NewPostExecListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener {
func NewPostExecListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta)) Listener {
return genListener{postExec: f}
}
func NewPostListener(f func(cmdtype string, txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener {
func NewPostListener(f func(cmdtype string, txID *uint16, sqlOriginal string, sqlReal string, result error, params PP)) Listener {
return genListener{
postExec: func(txID *uint16, sqlOriginal string, sqlReal string, params PP) {
f("EXEC", txID, sqlOriginal, sqlReal, params)
postExec: func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta) {
f("EXEC", txID, sqlOriginal, sqlReal, result, params)
},
postQuery: func(txID *uint16, sqlOriginal string, sqlReal string, params PP) {
f("QUERY", txID, sqlOriginal, sqlReal, params)
postQuery: func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta) {
f("QUERY", txID, sqlOriginal, sqlReal, result, params)
},
}
}
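Usage sketch (annotation, not part of the diff): how a listener built with the new meta-aware constructors might be consumed. The package alias sq and the AddListener registration call are assumptions for illustration; only the constructor signature and the meta timing fields are taken from the changes above.
lst := sq.NewPostQueryListener(func(txID *uint16, sqlOriginal string, sqlReal string, params sq.PP, result error, meta sq.PostQueryMeta) {
	// meta.Start / meta.End bracket the actual query execution (see transaction.Query below),
	// so End.Sub(Start) approximates the raw query duration.
	log.Printf("query took %s (err: %v)", meta.End.Sub(meta.Start), result)
})
// db.AddListener(lst) // hypothetical registration; internally the listeners live in db.lstr
_ = lst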



@@ -6,6 +6,7 @@ import (
"github.com/jmoiron/sqlx"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"time"
)
type TxStatus string
@@ -26,62 +27,78 @@ type Tx interface {
}
type transaction struct {
tx *sqlx.Tx
id uint16
status TxStatus
execCtr int
queryCtr int
db *database
constructorContext context.Context
tx *sqlx.Tx
id uint16
status TxStatus
execCtr int
queryCtr int
db *database
}
func NewTransaction(xtx *sqlx.Tx, txid uint16, db *database) Tx {
func newTransaction(ctx context.Context, xtx *sqlx.Tx, txid uint16, db *database) Tx {
return &transaction{
tx: xtx,
id: txid,
status: TxStatusInitial,
execCtr: 0,
queryCtr: 0,
db: db,
constructorContext: ctx,
tx: xtx,
id: txid,
status: TxStatusInitial,
execCtr: 0,
queryCtr: 0,
db: db,
}
}
func (tx *transaction) Rollback() error {
t0 := time.Now()
preMeta := PreTxRollbackMeta{ConstructorContext: tx.constructorContext}
for _, v := range tx.db.lstr {
err := v.PreTxRollback(tx.id)
err := v.PreTxRollback(tx.id, preMeta)
if err != nil {
return exerr.Wrap(err, "failed to call SQL pre-rollback listener").Int("tx.id", int(tx.id)).Build()
}
}
t1 := time.Now()
result := tx.tx.Rollback()
if result == nil {
tx.status = TxStatusRollback
}
postMeta := PostTxRollbackMeta{ConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now(), ExecCounter: tx.execCtr, QueryCounter: tx.queryCtr}
for _, v := range tx.db.lstr {
v.PostTxRollback(tx.id, result)
v.PostTxRollback(tx.id, result, postMeta)
}
return result
}
func (tx *transaction) Commit() error {
t0 := time.Now()
preMeta := PreTxCommitMeta{ConstructorContext: tx.constructorContext}
for _, v := range tx.db.lstr {
err := v.PreTxCommit(tx.id)
err := v.PreTxCommit(tx.id, preMeta)
if err != nil {
return exerr.Wrap(err, "failed to call SQL pre-commit listener").Int("tx.id", int(tx.id)).Build()
}
}
t1 := time.Now()
result := tx.tx.Commit()
if result == nil {
tx.status = TxStatusComitted
}
postMeta := PostTxCommitMeta{ConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now(), ExecCounter: tx.execCtr, QueryCounter: tx.queryCtr}
for _, v := range tx.db.lstr {
v.PostTxRollback(tx.id, result)
v.PostTxCommit(tx.id, result, postMeta)
}
return result
@@ -89,21 +106,29 @@ func (tx *transaction) Commit() error {
func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Result, error) {
origsql := sqlstr
t0 := time.Now()
preMeta := PreExecMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext}
for _, v := range tx.db.lstr {
err := v.PreExec(ctx, langext.Ptr(tx.id), &sqlstr, &prep)
err := v.PreExec(ctx, langext.Ptr(tx.id), &sqlstr, &prep, preMeta)
if err != nil {
return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
}
t1 := time.Now()
res, err := tx.tx.NamedExecContext(ctx, sqlstr, prep)
tx.execCtr++
if tx.status == TxStatusInitial && err == nil {
tx.status = TxStatusActive
}
postMeta := PostExecMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now()}
for _, v := range tx.db.lstr {
v.PostExec(langext.Ptr(tx.id), origsql, sqlstr, prep)
v.PostExec(langext.Ptr(tx.id), origsql, sqlstr, prep, err, postMeta)
}
if err != nil {
@@ -114,21 +139,29 @@ func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Re
func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Rows, error) {
origsql := sqlstr
t0 := time.Now()
preMeta := PreQueryMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext}
for _, v := range tx.db.lstr {
err := v.PreQuery(ctx, langext.Ptr(tx.id), &sqlstr, &prep)
err := v.PreQuery(ctx, langext.Ptr(tx.id), &sqlstr, &prep, preMeta)
if err != nil {
return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
}
t1 := time.Now()
rows, err := sqlx.NamedQueryContext(ctx, tx.tx, sqlstr, prep)
tx.queryCtr++
if tx.status == TxStatusInitial && err == nil {
tx.status = TxStatusActive
}
postMeta := PostQueryMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now()}
for _, v := range tx.db.lstr {
v.PostQuery(langext.Ptr(tx.id), origsql, sqlstr, prep)
v.PostQuery(langext.Ptr(tx.id), origsql, sqlstr, prep, err, postMeta)
}
if err != nil {

timeext/diff.go Normal file (+44 lines)

@@ -0,0 +1,44 @@
package timeext
import "time"
// YearDifference calculates the difference between two timestamps in years.
// = t1 - t2
// returns a float value
func YearDifference(t1 time.Time, t2 time.Time, tz *time.Location) float64 {
yDelta := float64(t1.Year() - t2.Year())
processT1 := float64(t1.Sub(TimeToYearStart(t1, tz))) / float64(TimeToYearEnd(t1, tz).Sub(TimeToYearStart(t1, tz)))
processT2 := float64(t2.Sub(TimeToYearStart(t2, tz))) / float64(TimeToYearEnd(t2, tz).Sub(TimeToYearStart(t2, tz)))
return yDelta + (processT1 - processT2)
}
// MonthDifference calculates the difference between two timestamps in months.
// = t1 - t2
// returns a float value
func MonthDifference(t1 time.Time, t2 time.Time) float64 {
yDelta := float64(t1.Year() - t2.Year())
mDelta := float64(t1.Month() - t2.Month())
dDelta := float64(0)
t1MonthDays := DaysInMonth(t1)
t2MonthDays := DaysInMonth(t2)
if t2.Year() > t1.Year() || (t2.Year() == t1.Year() && t2.Month() > t1.Month()) {
dDelta -= 1
dDelta += float64(t1MonthDays-t1.Day()) / float64(t1MonthDays)
dDelta += float64(t2.Day()) / float64(t2MonthDays)
} else if t2.Year() < t1.Year() || (t2.Year() == t1.Year() && t2.Month() < t1.Month()) {
dDelta -= 1
dDelta += float64(t1.Day()) / float64(t1MonthDays)
dDelta += float64(t2MonthDays-t2.Day()) / float64(t2MonthDays)
} else {
dDelta += float64(t1.Day()-t2.Day()) / float64(t1MonthDays)
}
return yDelta*12 + mDelta + dDelta
}
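Worked example (annotation, not part of the diff); the values follow directly from the definitions above:
t1 := time.Date(2021, 7, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
_ = YearDifference(t1, t2, time.UTC) // ≈ 1.5 (one full year plus roughly half of 2021 elapsed)
_ = MonthDifference(t1, t2)          // 18.0 (12*1 + 6 + day-fraction 1/31 + 30/31 - 1 = 0)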

timeext/diff_test.go Normal file (+143 lines)

@@ -0,0 +1,143 @@
package timeext
import (
"math"
"testing"
"time"
)
func TestYearDifferenceWithSameYearAndDay(t *testing.T) {
t1 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
expected := 0.0
result := YearDifference(t1, t2, time.UTC)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithOneYearApart(t *testing.T) {
t1 := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
expected := 1.0
result := YearDifference(t1, t2, time.UTC)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithDifferentMonths(t *testing.T) {
t1 := time.Date(2020, 6, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
expected := 0.4166666666666667 // approximately 5/12 of a year
result := YearDifference(t1, t2, time.UTC)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceAcrossYears(t *testing.T) {
t1 := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2020, 6, 1, 0, 0, 0, 0, time.UTC)
expected := 0.5833333333333334 // approximately 7/12 of a year
result := YearDifference(t1, t2, time.UTC)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithTimezone(t *testing.T) {
tz, _ := time.LoadLocation("America/New_York")
t1 := time.Date(2021, 1, 1, 0, 0, 0, 0, tz)
t2 := time.Date(2020, 6, 1, 0, 0, 0, 0, tz)
expected := 0.5833333333333334 // Same as UTC but ensuring timezone is considered
result := YearDifference(t1, t2, tz)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithNegativeDifference(t *testing.T) {
t1 := time.Date(2020, 1, 1, 0, 0, 0, 0, TimezoneBerlin)
t2 := time.Date(2021, 1, 1, 0, 0, 0, 0, TimezoneBerlin)
expected := -1.0
result := YearDifference(t1, t2, TimezoneBerlin)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithNegativeDifference2(t *testing.T) {
t1 := time.Date(2020, 7, 1, 0, 0, 0, 0, TimezoneBerlin)
t2 := time.Date(2021, 7, 1, 0, 0, 0, 0, TimezoneBerlin)
expected := -1.0
result := YearDifference(t1, t2, TimezoneBerlin)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func epsilonEquals(a, b float64) bool {
epsilon := 0.01
return math.Abs(a-b) < epsilon
}
func TestMonthDifferenceSameDate(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
expected := 0.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceSameMonth(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 1, 31, 0, 0, 0, 0, time.UTC)
expected := 0.967741935483871 // Approximation of 30/31 days
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceDifferentMonthsSameYear(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 3, 1, 0, 0, 0, 0, time.UTC)
expected := 2.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceDifferentYears(t *testing.T) {
t1 := time.Date(2021, 12, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 2, 1, 0, 0, 0, 0, time.UTC)
expected := 2.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceT1BeforeT2(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 6, 1, 0, 0, 0, 0, time.UTC)
expected := 5.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceT1AfterT2(t *testing.T) {
t1 := time.Date(2022, 6, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
expected := -5.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}


@@ -65,6 +65,10 @@ func TimeToYearEnd(t time.Time, tz *time.Location) time.Time {
return TimeToYearStart(t, tz).AddDate(1, 0, 0).Add(-1)
}
func TimeToNextYearStart(t time.Time, tz *time.Location) time.Time {
return TimeToYearStart(t, tz).AddDate(1, 0, 0)
}
// IsSameDayIncludingDateBoundaries returns true if t1 and t2 are part of the same day (TZ/Berlin), the boundaries of the day are
// inclusive, this means 2021-09-15T00:00:00 is still part of the day 2021-09-14
func IsSameDayIncludingDateBoundaries(t1 time.Time, t2 time.Time, tz *time.Location) bool {
@@ -146,3 +150,44 @@ func UnixFloatSeconds(v float64) time.Time {
func FloorTime(t time.Time) time.Time {
return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
}
func SubtractYears(t time.Time, yearCount float64, tz *time.Location) time.Time {
t = t.In(tz)
if yearCount < 0 {
return AddYears(t, -yearCount, tz)
}
intCount, floatCount := math.Modf(yearCount)
t = t.AddDate(-int(intCount), 0, 0)
t0 := TimeToYearStart(t, tz)
t1 := TimeToYearEnd(t, tz)
return t.Add(time.Duration(float64(t1.Sub(t0)) * floatCount * -1))
}
func AddYears(t time.Time, yearCount float64, tz *time.Location) time.Time {
t = t.In(tz)
if yearCount < 0 {
return SubtractYears(t, -yearCount, tz)
}
intCount, floatCount := math.Modf(yearCount)
t = t.AddDate(int(intCount), 0, 0)
t0 := TimeToYearStart(t, tz)
t1 := TimeToYearEnd(t, tz)
return t.Add(time.Duration(float64(t1.Sub(t0)) * floatCount))
}
func DaysInMonth(t time.Time) int {
// https://stackoverflow.com/a/73882035/1761622
y, m, _ := t.Date()
return time.Date(y, m+1, 0, 0, 0, 0, 0, time.UTC).Day()
}
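Quick sketch (annotation, not part of the diff) of the new fractional-year helpers and DaysInMonth:
base := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
_ = AddYears(base, 1.5, time.UTC)       // 2023-01-01 plus half of 2023's length, i.e. ≈ 2023-07-02
_ = SubtractYears(base, 0.25, time.UTC) // ≈ 2021-10-01 (a quarter of the year's length subtracted)
_ = DaysInMonth(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC)) // 29 (leap year)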

timeext/time_test.go Normal file (+229 lines)

@@ -0,0 +1,229 @@
package timeext
import (
"testing"
"time"
)
func TestTimeToDayStart(t *testing.T) {
tz := TimezoneBerlin
tm := time.Date(2022, 1, 1, 13, 14, 15, 0, tz)
expected := time.Date(2022, 1, 1, 0, 0, 0, 0, tz)
result := TimeToDayStart(tm, tz)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestTimeToDayEnd(t *testing.T) {
tz := TimezoneBerlin
tm := time.Date(2022, 1, 1, 13, 14, 15, 0, tz)
expected := time.Date(2022, 1, 2, 0, 0, 0, 0, tz).Add(-1)
result := TimeToDayEnd(tm, tz)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestIsSameDayIncludingDateBoundaries(t *testing.T) {
tz := TimezoneBerlin
t1 := time.Date(2022, 1, 1, 23, 59, 59, 0, tz)
t2 := time.Date(2022, 1, 2, 0, 0, 0, 0, tz)
if !IsSameDayIncludingDateBoundaries(t1, t2, tz) {
t.Errorf("Expected %v and %v to be the same day", t1, t2)
}
}
func TestIsDatePartEqual(t *testing.T) {
tz := TimezoneBerlin
t1 := time.Date(2022, 1, 1, 23, 59, 59, 0, tz)
t2 := time.Date(2022, 1, 1, 0, 0, 0, 0, tz)
if !IsDatePartEqual(t1, t2, tz) {
t.Errorf("Expected %v and %v to have the same date part", t1, t2)
}
}
func TestWithTimePart(t *testing.T) {
tz := TimezoneBerlin
base := time.Date(2022, 1, 1, 0, 0, 0, 0, tz)
expected := time.Date(2022, 1, 1, 13, 14, 15, 0, tz)
result := WithTimePart(base, 13, 14, 15)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestCombineDateAndTime(t *testing.T) {
tz := TimezoneBerlin
d := time.Date(2022, 1, 1, 0, 0, 0, 0, tz)
tm := time.Date(0, 0, 0, 13, 14, 15, 0, tz)
expected := time.Date(2022, 1, 1, 13, 14, 15, 0, tz)
result := CombineDateAndTime(d, tm)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestIsSunday(t *testing.T) {
tz := TimezoneBerlin
tm := time.Date(2022, 1, 2, 0, 0, 0, 0, tz) // 2nd January 2022 is a Sunday
if !IsSunday(tm, tz) {
t.Errorf("Expected %v to be a Sunday", tm)
}
}
func TestIsSunday_OnSunday(t *testing.T) {
sunday := time.Date(2022, 5, 15, 0, 0, 0, 0, TimezoneBerlin) // A Sunday
if !IsSunday(sunday, TimezoneBerlin) {
t.Errorf("Expected true for Sunday")
}
}
func TestDurationFromTime(t *testing.T) {
expected := time.Duration(13*time.Hour + 14*time.Minute + 15*time.Second)
result := DurationFromTime(13, 14, 15)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestMin(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 1, 2, 0, 0, 0, 0, time.UTC)
expected := t1
result := Min(t1, t2)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestMax(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 1, 2, 0, 0, 0, 0, time.UTC)
expected := t2
result := Max(t1, t2)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestUnixFloatSeconds(t *testing.T) {
v := 1640995200.0 // 1st January 2022 00:00:00 UTC in Unix timestamp
expected := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
result := UnixFloatSeconds(v)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestFloorTime(t *testing.T) {
tm := time.Date(2022, 1, 1, 13, 14, 15, 0, time.UTC)
expected := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
result := FloorTime(tm)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestSubtractYears(t *testing.T) {
tz := TimezoneBerlin
tm := time.Date(2022, 1, 1, 0, 0, 0, 0, tz)
expected := time.Date(2021, 1, 1, 0, 0, 0, 0, tz)
result := SubtractYears(tm, 1, tz)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
expected = time.Date(2020, 1, 1, 0, 0, 0, 0, tz)
result = SubtractYears(tm, 2, tz)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
expected = time.Date(2019, 1, 1, 0, 0, 0, 0, tz)
result = SubtractYears(tm, 3, tz)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
expected = time.Date(2025, 1, 1, 0, 0, 0, 0, tz)
result = SubtractYears(tm, -3, tz)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestAddYears(t *testing.T) {
tz := TimezoneBerlin
tm := time.Date(2022, 1, 1, 0, 0, 0, 0, tz)
expected := time.Date(2023, 1, 1, 0, 0, 0, 0, tz)
result := AddYears(tm, 1, tz)
if !result.Equal(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestIsDatePartEqual_SameDateDifferentTimes(t *testing.T) {
tz := time.UTC
t1 := time.Date(2022, 5, 18, 10, 30, 0, 0, tz)
t2 := time.Date(2022, 5, 18, 20, 45, 0, 0, tz)
if !IsDatePartEqual(t1, t2, tz) {
t.Errorf("Expected dates to be equal")
}
}
func TestWithTimePart_ChangeTime(t *testing.T) {
base := time.Date(2022, 5, 18, 0, 0, 0, 0, time.UTC)
result := WithTimePart(base, 15, 30, 45)
expected := time.Date(2022, 5, 18, 15, 30, 45, 0, time.UTC)
if !result.Equal(expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestCombineDateAndTime_CombineDifferentParts(t *testing.T) {
date := time.Date(2022, 5, 18, 0, 0, 0, 0, time.UTC)
timePart := time.Date(2000, 1, 1, 15, 30, 45, 0, time.UTC)
result := CombineDateAndTime(date, timePart)
expected := time.Date(2022, 5, 18, 15, 30, 45, 0, time.UTC)
if !result.Equal(expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestDaysInMonth_31Days(t *testing.T) {
date := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC) // January
expected := 31
result := DaysInMonth(date)
if result != expected {
t.Errorf("Expected %d but got %d", expected, result)
}
}
func TestDaysInMonth_30Days(t *testing.T) {
date := time.Date(2022, 4, 1, 0, 0, 0, 0, time.UTC) // April
expected := 30
result := DaysInMonth(date)
if result != expected {
t.Errorf("Expected %d but got %d", expected, result)
}
}
func TestDaysInMonth_FebruaryLeapYear(t *testing.T) {
date := time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC) // February in a leap year
expected := 29
result := DaysInMonth(date)
if result != expected {
t.Errorf("Expected %d but got %d", expected, result)
}
}
func TestDaysInMonth_FebruaryNonLeapYear(t *testing.T) {
date := time.Date(2021, 2, 1, 0, 0, 0, 0, time.UTC) // February in a non-leap year
expected := 28
result := DaysInMonth(date)
if result != expected {
t.Errorf("Expected %d but got %d", expected, result)
}
}


@@ -52,6 +52,7 @@ type Coll[TData any] struct {
customDecoder *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface)
isInterfaceDataType bool // true if TData is an interface (not a struct)
unmarshalHooks []func(d TData) TData // called for every object after unmarshalling
marshalHooks []func(d TData) TData // called for every object before marshalling
extraModPipeline []func(ctx context.Context) mongo.Pipeline // appended to pipelines after filter/limit/skip/sort, used for $lookup, $set, $unset, $project, etc
}
@@ -83,18 +84,32 @@ func (c *Coll[TData]) WithDecodeFunc(cdf func(ctx context.Context, dec Decodable
return c
}
// WithUnmarshalHook
// function that is called for every object after reading from DB
func (c *Coll[TData]) WithUnmarshalHook(fn func(d TData) TData) *Coll[TData] {
c.unmarshalHooks = append(c.unmarshalHooks, fn)
return c
}
// WithMarshalHook
// function that is called for every object before writing to DB
func (c *Coll[TData]) WithMarshalHook(fn func(d TData) TData) *Coll[TData] {
c.marshalHooks = append(c.marshalHooks, fn)
return c
}
// WithModifyingPipeline
// pipeline that is appended to all read operations (after filtering)
func (c *Coll[TData]) WithModifyingPipeline(p mongo.Pipeline) *Coll[TData] {
c.extraModPipeline = append(c.extraModPipeline, func(ctx context.Context) mongo.Pipeline { return p })
return c
}
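Illustrative sketch (annotation, not part of the diff) of chaining the new marshal hook with a modifying pipeline; the User type and the getUserColl constructor are invented for the example, only the method signatures come from the changes above:
type User struct {
	ID        string    `bson:"_id"`
	UpdatedAt time.Time `bson:"updatedAt"`
}
coll := getUserColl(). // hypothetical: returns *Coll[User]
	WithMarshalHook(func(u User) User { u.UpdatedAt = time.Now(); return u }).          // runs before every write (InsertOne, InsertMany, ReplaceOne, FindOneAndReplace)
	WithModifyingPipeline(mongo.Pipeline{bson.D{{Key: "$unset", Value: "secretField"}}}) // appended to read pipelines after filter/limit/skip/sort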
// WithModifyingPipelineFunc
// pipeline that is appended to all read operations (after filtering)
func (c *Coll[TData]) WithModifyingPipelineFunc(fn func(ctx context.Context) mongo.Pipeline) *Coll[TData] {
c.extraModPipeline = append(c.extraModPipeline, fn)
@@ -105,23 +120,37 @@ func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirecti
valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary)
if err != nil {
return ct.CursorToken{}, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build()
return nil, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build()
}
valueSeconary := ""
if fieldSecondary != nil && dirSecondary != nil {
valueSeconary, err = c.getFieldValueAsTokenString(lastEntity, *fieldSecondary)
if err != nil {
return ct.CursorToken{}, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build()
return nil, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build()
}
}
return ct.CursorToken{
Mode: ct.CTMNormal,
ValuePrimary: valuePrimary,
ValueSecondary: valueSeconary,
Direction: dirPrimary,
PageSize: langext.Coalesce(pageSize, 0),
Extra: ct.Extra{},
}, nil
return ct.NewKeySortToken(
valuePrimary,
valueSeconary,
dirPrimary,
dirPrimary,
langext.Coalesce(pageSize, 0),
ct.Extra{},
), nil
}
func (c *Coll[TData]) needsDoubleSort(ctx context.Context) bool {
for _, ppl := range c.extraModPipeline {
for _, stage := range ppl(ctx) {
for _, bsone := range stage {
if bsone.Key == "$group" {
// a group stage in extraModPipeline results in unsorted data, which means the caller must sort again after these pipeline stages...
return true
}
}
}
}
return false
}
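In other words (annotation, not part of the diff): if any extra pipeline contains a $group stage, the List pipeline below is assembled roughly as
// filter → $sort/$limit (pagination) → extraModPipeline (incl. $group, order is lost) → $sort again (doubleSortPipeline)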


@@ -19,6 +19,8 @@ func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, op
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
}
defer func() { _ = cursor.Close(ctx) }()
res, err := c.decodeAll(ctx, cursor)
if err != nil {
return nil, exerr.Wrap(err, "failed to decode values").Build()
@@ -38,6 +40,8 @@ func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeli
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
}
defer func() { _ = cursor.Close(ctx) }()
if cursor.Next(ctx) {
v, err := c.decodeSingle(ctx, cursor)
if err != nil {
@@ -60,6 +64,8 @@ func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline,
return *new(TData), exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
}
defer func() { _ = cursor.Close(ctx) }()
if cursor.Next(ctx) {
v, err := c.decodeSingle(ctx, cursor)
if err != nil {


@@ -36,6 +36,14 @@ func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.
pipeline = langext.ArrConcat(pipeline, ppl(ctx))
}
if c.needsDoubleSort(ctx) {
for _, opt := range opts {
if opt != nil && opt.Sort != nil {
pipeline = append(pipeline, bson.D{{Key: "$sort", Value: opt.Sort}})
}
}
}
for _, opt := range opts {
if opt != nil && opt.Projection != nil {
pipeline = append(pipeline, bson.D{{Key: "$project", Value: opt.Projection}})
@@ -56,6 +64,8 @@ func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
defer func() { _ = cursor.Close(ctx) }()
res, err := c.decodeAll(ctx, cursor)
if err != nil {
return nil, exerr.Wrap(err, "failed to decode values").Build()


@@ -12,7 +12,11 @@ import (
func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) {
r, err := c.findOneInternal(ctx, filter, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Build()
if filterId, ok := filter["_id"]; ok {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Any("filter", filter).Any("filter_id", filterId).Build()
} else {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Any("filter", filter).Build()
}
}
return *r, nil
@@ -21,7 +25,7 @@ func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error)
func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) {
r, err := c.findOneInternal(ctx, filter, true)
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Str("collection", c.Name()).Build()
return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Str("collection", c.Name()).Any("filter", filter).Build()
}
return r, nil
@@ -58,7 +62,11 @@ func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowN
return nil, nil
}
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one] failed").Any("filter", filter).Str("collection", c.Name()).NoLog().Build()
if filterId, ok := filter["_id"]; ok {
return nil, exerr.Wrap(err, "mongo-query[find-one|internal] failed").Str("collection", c.Name()).Any("filter", filter).Any("filter_id", filterId).NoLog().Build()
} else {
return nil, exerr.Wrap(err, "mongo-query[find-one|internal] failed").Str("collection", c.Name()).Any("filter", filter).NoLog().Build()
}
}
return &res, nil
@@ -80,6 +88,8 @@ func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowN
return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).NoLog().Build()
}
defer func() { _ = cursor.Close(ctx) }()
if cursor.Next(ctx) {
v, err := c.decodeSingle(ctx, cursor)
if err != nil {


@@ -9,6 +9,10 @@ import (
)
func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, error) {
for _, hook := range c.marshalHooks {
valueIn = hook(valueIn)
}
insRes, err := c.coll.InsertOne(ctx, valueIn)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build()
@@ -36,6 +40,12 @@ func (c *Coll[TData]) InsertOneUnchecked(ctx context.Context, valueIn any) (TDat
}
func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) {
for _, hook := range c.marshalHooks {
for i := 0; i < len(valueIn); i++ {
valueIn[i] = hook(valueIn[i])
}
}
insRes, err := c.coll.InsertMany(ctx, langext.ArrayToInterface(valueIn))
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[insert-many] failed").Int("len(valueIn)", len(valueIn)).Str("collection", c.Name()).Build()


@@ -10,10 +10,36 @@ import (
)
func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) {
if inTok == nil {
inTok = ct.Start()
}
if ctks, ok := inTok.(ct.CTKeySort); ok {
d, tok, err := c.listWithKSToken(ctx, filter, pageSize, ctks)
if err != nil {
return nil, ct.End(), err
}
return d, tok, nil
} else if ctks, ok := inTok.(ct.CTPaginated); ok {
d, tok, err := c.listWithPaginatedToken(ctx, filter, pageSize, ctks)
if err != nil {
return nil, ct.End(), err
}
return d, tok, nil
} else {
return nil, ct.End(), exerr.New(exerr.TypeCursorTokenDecode, "unknown ct type").Any("token", inTok).Type("tokenType", inTok).Build()
}
}
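Sketch of the two token flavours List now accepts (annotation, not part of the diff; coll, ctx and filter are assumed to exist):
items, next, err := coll.List(ctx, filter, langext.Ptr(50), ct.Start())    // key-sort cursor paging: feed `next` back in for the following page
items2, next2, err2 := coll.List(ctx, filter, langext.Ptr(50), ct.Page(2)) // offset-based paging: pages start at 1, ct.PageEnd() marks the last page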
func (c *Coll[TData]) listWithKSToken(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTKeySort) ([]TData, ct.CursorToken, error) {
if inTok.Mode == ct.CTMEnd {
return make([]TData, 0), ct.End(), nil
}
if pageSize != nil && *pageSize == 0 {
return make([]TData, 0), inTok, nil // fast track, we return an empty list and do not advance the cursor token
}
pipeline := mongo.Pipeline{}
pf1 := "_id"
pd1 := ct.SortASC
@@ -21,8 +47,8 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
pd2 := ct.SortASC
if filter != nil {
pipeline = filter.FilterQuery()
pf1, pd1, pf2, pd2 = filter.Pagination()
pipeline = filter.FilterQuery(ctx)
pf1, pd1, pf2, pd2 = filter.Pagination(ctx)
}
sortPrimary := pf1
@@ -35,9 +61,9 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
sortDirSecondary = nil
}
paginationPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
paginationPipeline, doubleSortPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
if err != nil {
return nil, ct.CursorToken{}, exerr.
return nil, nil, exerr.
Wrap(err, "failed to create pagination").
WithType(exerr.TypeCursorTokenDecode).
Str("collection", c.Name()).
@@ -56,16 +82,22 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
pipeline = langext.ArrConcat(pipeline, ppl(ctx))
}
if c.needsDoubleSort(ctx) {
pipeline = langext.ArrConcat(pipeline, doubleSortPipeline)
}
cursor, err := c.coll.Aggregate(ctx, pipeline)
if err != nil {
return nil, ct.CursorToken{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
return nil, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
defer func() { _ = cursor.Close(ctx) }()
// fast branch
if pageSize == nil {
entries, err := c.decodeAll(ctx, cursor)
if err != nil {
return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to all-decode entities").Build()
return nil, nil, exerr.Wrap(err, "failed to all-decode entities").Build()
}
return entries, ct.End(), nil
}
@@ -75,7 +107,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
var entry TData
entry, err = c.decodeSingle(ctx, cursor)
if err != nil {
return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to decode entity").Build()
return nil, nil, exerr.Wrap(err, "failed to decode entity").Build()
}
entities = append(entities, entry)
}
@@ -90,18 +122,76 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize)
if err != nil {
return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to create (out)-token").Build()
return nil, nil, exerr.Wrap(err, "failed to create (out)-token").Build()
}
return entities, nextToken, nil
}
func (c *Coll[TData]) listWithPaginatedToken(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTPaginated) ([]TData, ct.CursorToken, error) {
var err error
page := inTok.Page
if page < 0 {
page = 1
}
pipelineSort := mongo.Pipeline{}
pipelineFilter := mongo.Pipeline{}
if filter != nil {
pipelineFilter = filter.FilterQuery(ctx)
pf1, pd1, pf2, pd2 := filter.Pagination(ctx)
pipelineSort, err = createSortOnlyPipeline(pf1, pd1, &pf2, &pd2)
if err != nil {
return nil, nil, exerr.Wrap(err, "failed to create sort pipeline").Build()
}
}
pipelinePaginate := mongo.Pipeline{}
if pageSize != nil {
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$skip", Value: *pageSize * (page - 1)}})
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$limit", Value: *pageSize}})
} else {
page = 1
}
pipelineCount := mongo.Pipeline{}
pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}})
extrModPipelineResolved := mongo.Pipeline{}
for _, ppl := range c.extraModPipeline {
extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx))
}
pipelineList := langext.ArrConcat(pipelineFilter, pipelineSort, pipelinePaginate, extrModPipelineResolved, pipelineSort)
cursorList, err := c.coll.Aggregate(ctx, pipelineList)
if err != nil {
return nil, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build()
}
entities, err := c.decodeAll(ctx, cursorList)
if err != nil {
return nil, nil, exerr.Wrap(err, "failed to all-decode entities").Build()
}
tokOut := ct.Page(page + 1)
if pageSize == nil || len(entities) < *pageSize {
tokOut = ct.PageEnd()
}
return entities, tokOut, nil
}
func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, error) {
type countRes struct {
Count int64 `bson:"c"`
}
pipeline := filter.FilterQuery()
pipeline := filter.FilterQuery(ctx)
pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}})
@@ -110,6 +200,8 @@ func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, er
return 0, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
defer func() { _ = cursor.Close(ctx) }()
if cursor.Next(ctx) {
v := countRes{}
err = cursor.Decode(&v)
@@ -126,24 +218,60 @@ func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageS
// NOTE: Possible optimization: Cache count in CursorToken, then fetch count only on first page.
count, err := c.Count(ctx, filter)
if err != nil {
return nil, ct.CursorToken{}, 0, err
return nil, nil, 0, err
}
data, token, err := c.List(ctx, filter, pageSize, inTok)
if err != nil {
return nil, ct.CursorToken{}, 0, err
return nil, nil, 0, err
}
return data, token, count, nil
}
func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) {
func (c *Coll[TData]) ListAllIDs(ctx context.Context, filter ct.RawFilter) ([]string, error) {
type idObject struct {
ID string `bson:"_id"`
}
pipelineFilter := mongo.Pipeline{}
if filter != nil {
pipelineFilter = filter.FilterQuery(ctx)
}
extrModPipelineResolved := mongo.Pipeline{}
for _, ppl := range c.extraModPipeline {
extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx))
}
pipelineProjectIDs := mongo.Pipeline{}
pipelineProjectIDs = append(pipelineProjectIDs, bson.D{{Key: "$project", Value: bson.M{"_id": 1}}})
pipelineList := langext.ArrConcat(pipelineFilter, extrModPipelineResolved, pipelineProjectIDs)
cursorList, err := c.coll.Aggregate(ctx, pipelineList)
if err != nil {
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build()
}
var res []idObject
err = cursorList.All(ctx, &res)
if err != nil {
return nil, exerr.Wrap(err, "failed to decode entities").Any("pipeline", pipelineList).Str("collection", c.Name()).Build()
}
return langext.ArrMap(res, func(v idObject) string { return v.ID }), nil
}
func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CTKeySort, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) {
cond := bson.A{}
sort := bson.D{}
valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary)
if err != nil {
return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build()
return nil, nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build()
}
if sortPrimary == ct.SortASC {
@@ -160,28 +288,28 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken
valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary)
if err != nil {
return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build()
return nil, nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build()
}
if *sortSecondary == ct.SortASC {
// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{"$or": bson.A{bson.M{fieldPrimary: valuePrimary}, bson.M{fieldPrimary: nil}, bson.M{fieldPrimary: bson.M{"$exists": false}}}},
bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}},
}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
sort = append(sort, bson.E{Key: *fieldSecondary, Value: +1})
} else if *sortSecondary == ct.SortDESC {
// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{"$or": bson.A{bson.M{fieldPrimary: valuePrimary}, bson.M{fieldPrimary: nil}, bson.M{fieldPrimary: bson.M{"$exists": false}}}},
bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}},
}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
sort = append(sort, bson.E{Key: *fieldSecondary, Value: -1})
}
}
@@ -203,15 +331,47 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken
} else {
return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build()
return nil, nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build()
}
pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}})
pipelineSort := mongo.Pipeline{bson.D{{Key: "$sort", Value: sort}}}
if pageSize != nil {
pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}})
}
return pipeline, nil
return pipeline, pipelineSort, nil
}
func createSortOnlyPipeline(fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection) ([]bson.D, error) {
sort := bson.D{}
if sortPrimary == ct.SortASC {
// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if sortPrimary == ct.SortDESC {
// We sort DESC on <field> - so we want all entries older ($lt) than the $primary
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {
if *sortSecondary == ct.SortASC {
sort = append(sort, bson.E{Key: *fieldSecondary, Value: +1})
} else if *sortSecondary == ct.SortDESC {
sort = append(sort, bson.E{Key: *fieldSecondary, Value: -1})
}
}
pipelineSort := mongo.Pipeline{bson.D{{Key: "$sort", Value: sort}}}
return pipelineSort, nil
}
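For instance (annotation, not part of the diff), createSortOnlyPipeline("timestamp", ct.SortDESC, langext.Ptr("_id"), langext.Ptr(ct.SortASC)) yields a single stage equivalent to:
mongo.Pipeline{bson.D{{Key: "$sort", Value: bson.D{{Key: "timestamp", Value: -1}, {Key: "_id", Value: +1}}}}}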


@@ -23,8 +23,8 @@ func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.MongoFilter, page
sort := bson.D{}
if filter != nil {
pipelineFilter = filter.FilterQuery()
sort = filter.Sort()
pipelineFilter = filter.FilterQuery(ctx)
sort = filter.Sort(ctx)
}
if len(sort) != 0 {
@@ -60,19 +60,27 @@ func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.MongoFilter, page
return nil, pag.Pagination{}, exerr.Wrap(err, "failed to all-decode entities").Build()
}
cursorTotalCount, err := c.coll.Aggregate(ctx, pipelineTotalCount)
if err != nil {
return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build()
}
var tcRes totalCountResult
if cursorTotalCount.Next(ctx) {
err = cursorTotalCount.Decode(&tcRes)
if err != nil {
return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode mongo-aggregation $count result").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build()
}
if limit == nil {
// optimization, limit==nil, so we query all entities anyway, just use the array length
tcRes.Count = len(entities)
} else {
tcRes.Count = 0 // no entries in DB
cursorTotalCount, err := c.coll.Aggregate(ctx, pipelineTotalCount)
if err != nil {
return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build()
}
if cursorTotalCount.Next(ctx) {
err = cursorTotalCount.Decode(&tcRes)
if err != nil {
return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode mongo-aggregation $count result").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build()
}
} else {
tcRes.Count = 0 // no entries in DB
}
}
paginationObj := pag.Pagination{


@@ -61,6 +61,10 @@ func (c *Coll[TData]) UpdateMany(ctx context.Context, filterQuery bson.M, update
}
func (c *Coll[TData]) ReplaceOne(ctx context.Context, filterQuery bson.M, value TData) error {
for _, hook := range c.marshalHooks {
value = hook(value)
}
_, err := c.coll.UpdateOne(ctx, filterQuery, bson.M{"$set": value})
if err != nil {
return exerr.Wrap(err, "mongo-query[replace-one] failed").
@@ -73,6 +77,10 @@ func (c *Coll[TData]) ReplaceOne(ctx context.Context, filterQuery bson.M, value
}
func (c *Coll[TData]) FindOneAndReplace(ctx context.Context, filterQuery bson.M, value TData) (TData, error) {
for _, hook := range c.marshalHooks {
value = hook(value)
}
mongoRes := c.coll.FindOneAndReplace(ctx, filterQuery, value, options.FindOneAndReplace().SetReturnDocument(options.After))
if err := mongoRes.Err(); err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one-and-update] failed").

wpdf/.gitignore vendored Normal file (+1 line)

@@ -0,0 +1 @@
wpdf_test.pdf

wpdf/logo.png Normal file (BIN)

Binary file not shown.

Size: 11 KiB

wpdf/utils.go Normal file (+9 lines)

@@ -0,0 +1,9 @@
package wpdf
func hexToColor(c uint32) PDFColor {
return PDFColor{R: int((c >> 16) & 0xFF), G: int((c >> 8) & 0xFF), B: int((c >> 0) & 0xFF)}
}
func rgbToColor(r, g, b int) PDFColor {
return PDFColor{R: r, G: g, B: b}
}
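Example (annotation, not part of the diff) of the hex helper splitting a 24-bit RGB value into its channels, as used from inside the package:
c := hexToColor(0x2D6A4F)   // PDFColor{R: 45, G: 106, B: 79}
_ = rgbToColor(45, 106, 79) // the same color, passed as separate channels
_ = c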

wpdf/wpdf.go Normal file (+280 lines)

@@ -0,0 +1,280 @@
package wpdf
import (
"bytes"
"github.com/jung-kurt/gofpdf"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type WPDFBuilder struct {
b *gofpdf.Fpdf
tr func(string) string
cellHeight float64
cellSpacing float64
fontName PDFFontFamily
fontStyle PDFFontStyle
fontSize float64
debug bool
}
type PDFMargins struct {
Left float64
Top float64
Right float64
}
type PDFColor struct {
R int
G int
B int
}
func NewPDFBuilder(orientation PDFOrientation, size PDFSize, unicode bool) *WPDFBuilder {
fpdfbuilder := gofpdf.New(string(orientation), "mm", string(size), "")
var tr func(string) string
if unicode {
tr = fpdfbuilder.UnicodeTranslatorFromDescriptor("")
} else {
tr = func(s string) string { return s }
}
b := &WPDFBuilder{
b: fpdfbuilder,
tr: tr,
cellHeight: 5,
cellSpacing: 1,
}
b.SetMargins(PDFMargins{Left: 15, Top: 25, Right: 15}) // default values
b.SetFont(FontHelvetica, Normal, 12) // ensures font is set
return b
}
func (b *WPDFBuilder) FPDF() *gofpdf.Fpdf {
return b.b
}
func (b *WPDFBuilder) SetMargins(v PDFMargins) {
b.b.SetMargins(v.Left, v.Top, v.Right)
}
func (b *WPDFBuilder) AddPage() {
b.b.AddPage()
if b.debug {
ml, mt, mr, mb := b.GetMargins()
pw, ph := b.GetPageSize()
b.Rect(pw-ml-mr, ph-mt-mb, RectOutline, NewPDFRectOpt().X(ml).Y(mt).LineWidth(0.25).DrawColor(0, 0, 128))
b.Rect(pw, mt, RectFill, NewPDFRectOpt().X(0).Y(0).FillColor(0, 0, 255).Alpha(0.2, BlendNormal))
b.Rect(ml, ph-mt-mb, RectFill, NewPDFRectOpt().X(0).Y(mt).FillColor(0, 0, 255).Alpha(0.2, BlendNormal))
b.Rect(mr, ph-mt-mb, RectFill, NewPDFRectOpt().X(pw-mr).Y(mt).FillColor(0, 0, 255).Alpha(0.2, BlendNormal))
b.Rect(pw, mb, RectFill, NewPDFRectOpt().X(0).Y(ph-mb).FillColor(0, 0, 255).Alpha(0.2, BlendNormal))
}
}
func (b *WPDFBuilder) SetTextColor(cr, cg, cb int) {
b.b.SetTextColor(cr, cg, cb)
}
func (b *WPDFBuilder) GetTextColor() (cr, cg, cb int) {
return b.b.GetTextColor()
}
func (b *WPDFBuilder) SetDrawColor(cr, cg, cb int) {
b.b.SetDrawColor(cr, cg, cb)
}
func (b *WPDFBuilder) GetDrawColor() (cr, cg, cb int) {
return b.b.GetDrawColor()
}
func (b *WPDFBuilder) SetFillColor(cr, cg, cb int) {
b.b.SetFillColor(cr, cg, cb)
}
func (b *WPDFBuilder) GetFillColor() (cr, cg, cb int) {
return b.b.GetFillColor()
}
func (b *WPDFBuilder) SetLineWidth(w float64) {
b.b.SetLineWidth(w)
}
func (b *WPDFBuilder) GetLineWidth() float64 {
return b.b.GetLineWidth()
}
func (b *WPDFBuilder) SetFont(fontName PDFFontFamily, fontStyle PDFFontStyle, fontSize float64) {
b.b.SetFont(string(fontName), string(fontStyle), fontSize)
b.fontName = fontName
b.fontStyle = fontStyle
b.fontSize = fontSize
b.cellHeight = b.b.PointConvert(fontSize)
}
func (b *WPDFBuilder) GetFontSize() float64 {
return b.fontSize
}
func (b *WPDFBuilder) GetFontFamily() PDFFontFamily {
return b.fontName
}
func (b *WPDFBuilder) GetFontStyle() PDFFontStyle {
return b.fontStyle
}
func (b *WPDFBuilder) SetCellSpacing(h float64) {
b.cellSpacing = h
}
func (b *WPDFBuilder) Ln(h float64) {
xBefore, yBefore := b.GetXY()
b.b.Ln(h)
yAfter := b.GetY()
if b.debug {
_, _, mr, _ := b.GetMargins()
pw, _ := b.GetPageSize()
b.Rect(pw-mr-xBefore, yAfter-yBefore, RectOutline, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).DrawColor(128, 128, 0).Alpha(0.5, BlendNormal))
b.Rect(pw-mr-xBefore, yAfter-yBefore, RectFill, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).FillColor(128, 128, 0).Alpha(0.1, BlendNormal))
b.Line(xBefore, yBefore, pw-mr, yAfter, NewPDFLineOpt().LineWidth(0.25).DrawColor(128, 128, 0))
}
}
func (b *WPDFBuilder) Build() ([]byte, error) {
buf := new(bytes.Buffer)
err := b.b.Output(buf)
if err != nil {
return nil, err
}
return buf.Bytes(), nil
}
func (b *WPDFBuilder) SetX(x float64) {
b.b.SetX(x)
}
func (b *WPDFBuilder) IncX(dx float64) {
b.b.SetX(b.b.GetX() + dx)
}
func (b *WPDFBuilder) GetX() float64 {
return b.b.GetX()
}
func (b *WPDFBuilder) SetY(y float64) {
b.b.SetY(y)
}
func (b *WPDFBuilder) GetY() float64 {
return b.b.GetY()
}
func (b *WPDFBuilder) SetXY(x float64, y float64) {
b.b.SetXY(x, y)
}
func (b *WPDFBuilder) GetXY() (x float64, y float64) {
return b.b.GetXY()
}
func (b *WPDFBuilder) GetMargins() (left, top, right, bottom float64) {
return b.b.GetMargins()
}
func (b *WPDFBuilder) GetMarginLeft() float64 {
v, _, _, _ := b.b.GetMargins()
return v
}
func (b *WPDFBuilder) GetMarginTop() float64 {
_, v, _, _ := b.b.GetMargins()
return v
}
func (b *WPDFBuilder) GetMarginRight() float64 {
_, _, v, _ := b.b.GetMargins()
return v
}
func (b *WPDFBuilder) GetMarginBottom() float64 {
_, _, _, v := b.b.GetMargins()
return v
}
func (b *WPDFBuilder) GetPageSize() (width, height float64) {
return b.b.GetPageSize()
}
func (b *WPDFBuilder) GetPageWidth() float64 {
v, _ := b.b.GetPageSize()
return v
}
func (b *WPDFBuilder) GetPageHeight() float64 {
_, v := b.b.GetPageSize()
return v
}
func (b *WPDFBuilder) GetWorkAreaWidth() float64 {
return b.GetPageWidth() - b.GetMarginLeft() - b.GetMarginRight()
}
func (b *WPDFBuilder) SetAutoPageBreak(auto bool, margin float64) {
b.b.SetAutoPageBreak(auto, margin)
}
func (b *WPDFBuilder) SetFooterFunc(fnc func()) {
b.b.SetFooterFunc(fnc)
}
func (b *WPDFBuilder) PageNo() int {
return b.b.PageNo()
}
func (b *WPDFBuilder) Bookmark(txtStr string, level int, y float64) {
b.b.Bookmark(b.tr(txtStr), level, y)
}
func (b *WPDFBuilder) GetStringWidth(str string, opts ...PDFCellOpt) float64 {
var fontNameOverride *PDFFontFamily
var fontStyleOverride *PDFFontStyle
var fontSizeOverride *float64
for _, opt := range opts {
fontNameOverride = langext.CoalesceOpt(opt.fontNameOverride, fontNameOverride)
fontStyleOverride = langext.CoalesceOpt(opt.fontStyleOverride, fontStyleOverride)
fontSizeOverride = langext.CoalesceOpt(opt.fontSizeOverride, fontSizeOverride)
}
if fontNameOverride != nil || fontStyleOverride != nil || fontSizeOverride != nil {
oldFontName := b.fontName
oldFontStyle := b.fontStyle
oldFontSize := b.fontSize
newFontName := langext.Coalesce(fontNameOverride, oldFontName)
newFontStyle := langext.Coalesce(fontStyleOverride, oldFontStyle)
newFontSize := langext.Coalesce(fontSizeOverride, oldFontSize)
b.SetFont(newFontName, newFontStyle, newFontSize)
defer func() { b.SetFont(oldFontName, oldFontStyle, oldFontSize) }()
}
return b.b.GetStringWidth(str)
}
func (b *WPDFBuilder) Debug(v bool) {
b.debug = v
}
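Minimal usage sketch (annotation, not part of the diff), based only on the exported methods and constants added in this changeset:
b := wpdf.NewPDFBuilder(wpdf.Portrait, wpdf.SizeA4, true)
b.AddPage()
b.Cell("Hello PDF", wpdf.NewPDFCellOpt().Bold().FontSize(16))
pdfBytes, err := b.Build()
if err != nil {
	// handle the error
}
_ = pdfBytes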

wpdf/wpdfCell.go Normal file (+303 lines)

@@ -0,0 +1,303 @@
package wpdf
import (
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type PDFCellOpt struct {
width *float64
height *float64
border *PDFBorder
ln *PDFTextBreak
align *PDFTextAlign
fill *bool
link *int
linkStr *string
fontNameOverride *PDFFontFamily
fontStyleOverride *PDFFontStyle
fontSizeOverride *float64
alphaOverride *dataext.Tuple[float64, PDFBlendMode]
extraLn *float64
x *float64
autoWidth *bool
textColor *PDFColor
borderColor *PDFColor
fillColor *PDFColor
autoWidthPaddingX *float64
debug *bool
}
func NewPDFCellOpt() *PDFCellOpt {
return &PDFCellOpt{}
}
func (opt *PDFCellOpt) Width(v float64) *PDFCellOpt {
opt.width = &v
return opt
}
func (opt *PDFCellOpt) Height(v float64) *PDFCellOpt {
opt.height = &v
return opt
}
func (opt *PDFCellOpt) Border(v PDFBorder) *PDFCellOpt {
opt.border = &v
return opt
}
func (opt *PDFCellOpt) LnPos(v PDFTextBreak) *PDFCellOpt {
opt.ln = &v
return opt
}
func (opt *PDFCellOpt) Align(v PDFTextAlign) *PDFCellOpt {
opt.align = &v
return opt
}
func (opt *PDFCellOpt) FillBackground(v bool) *PDFCellOpt {
opt.fill = &v
return opt
}
func (opt *PDFCellOpt) Link(v int) *PDFCellOpt {
opt.link = &v
return opt
}
func (opt *PDFCellOpt) LinkStr(v string) *PDFCellOpt {
opt.linkStr = &v
return opt
}
func (opt *PDFCellOpt) Font(fontName PDFFontFamily, fontStyle PDFFontStyle, fontSize float64) *PDFCellOpt {
opt.fontNameOverride = &fontName
opt.fontStyleOverride = &fontStyle
opt.fontSizeOverride = &fontSize
return opt
}
func (opt *PDFCellOpt) FontName(v PDFFontFamily) *PDFCellOpt {
opt.fontNameOverride = &v
return opt
}
func (opt *PDFCellOpt) FontStyle(v PDFFontStyle) *PDFCellOpt {
opt.fontStyleOverride = &v
return opt
}
func (opt *PDFCellOpt) FontSize(v float64) *PDFCellOpt {
opt.fontSizeOverride = &v
return opt
}
func (opt *PDFCellOpt) Bold() *PDFCellOpt {
opt.fontStyleOverride = langext.Ptr(Bold)
return opt
}
func (opt *PDFCellOpt) Italic() *PDFCellOpt {
opt.fontStyleOverride = langext.Ptr(Italic)
return opt
}
func (opt *PDFCellOpt) LnAfter(v float64) *PDFCellOpt {
opt.extraLn = &v
return opt
}
func (opt *PDFCellOpt) X(v float64) *PDFCellOpt {
opt.x = &v
return opt
}
func (opt *PDFCellOpt) AutoWidth() *PDFCellOpt {
opt.autoWidth = langext.PTrue
return opt
}
func (opt *PDFCellOpt) AutoWidthPaddingX(v float64) *PDFCellOpt {
opt.autoWidthPaddingX = &v
return opt
}
func (opt *PDFCellOpt) TextColor(cr, cg, cb int) *PDFCellOpt {
opt.textColor = langext.Ptr(rgbToColor(cr, cg, cb))
return opt
}
func (opt *PDFCellOpt) TextColorHex(c uint32) *PDFCellOpt {
opt.textColor = langext.Ptr(hexToColor(c))
return opt
}
func (opt *PDFCellOpt) BorderColor(cr, cg, cb int) *PDFCellOpt {
opt.borderColor = langext.Ptr(rgbToColor(cr, cg, cb))
return opt
}
func (opt *PDFCellOpt) BorderColorHex(c uint32) *PDFCellOpt {
opt.borderColor = langext.Ptr(hexToColor(c))
return opt
}
func (opt *PDFCellOpt) FillColor(cr, cg, cb int) *PDFCellOpt {
opt.fillColor = langext.Ptr(rgbToColor(cr, cg, cb))
return opt
}
func (opt *PDFCellOpt) FillColorHex(c uint32) *PDFCellOpt {
opt.fillColor = langext.Ptr(hexToColor(c))
return opt
}
func (opt *PDFCellOpt) Alpha(alpha float64, blendMode PDFBlendMode) *PDFCellOpt {
opt.alphaOverride = &dataext.Tuple[float64, PDFBlendMode]{V1: alpha, V2: blendMode}
return opt
}
func (opt *PDFCellOpt) Debug(v bool) *PDFCellOpt {
opt.debug = &v
return opt
}
func (opt *PDFCellOpt) Copy() *PDFCellOpt {
c := *opt
return &c
}
func (opt *PDFCellOpt) ToMulti() *PDFMultiCellOpt {
return &PDFMultiCellOpt{
width: opt.width,
height: opt.height,
border: opt.border,
align: opt.align,
fill: opt.fill,
fontNameOverride: opt.fontNameOverride,
fontStyleOverride: opt.fontStyleOverride,
fontSizeOverride: opt.fontSizeOverride,
extraLn: opt.extraLn,
x: opt.x,
textColor: opt.textColor,
borderColor: opt.borderColor,
fillColor: opt.fillColor,
}
}
func (b *WPDFBuilder) Cell(txt string, opts ...*PDFCellOpt) {
txtTR := b.tr(txt)
width := float64(0)
var height *float64 = nil
border := BorderNone
ln := BreakToNextLine
align := AlignLeft
fill := false
link := 0
linkStr := ""
var fontNameOverride *PDFFontFamily
var fontStyleOverride *PDFFontStyle
var fontSizeOverride *float64
var alphaOverride *dataext.Tuple[float64, PDFBlendMode]
extraLn := float64(0)
var x *float64
autoWidth := false
var textColor *PDFColor
var borderColor *PDFColor
var fillColor *PDFColor
autoWidthPaddingX := float64(0)
debug := b.debug
for _, opt := range opts {
width = langext.Coalesce(opt.width, width)
height = langext.CoalesceOpt(opt.height, height)
border = langext.Coalesce(opt.border, border)
ln = langext.Coalesce(opt.ln, ln)
align = langext.Coalesce(opt.align, align)
fill = langext.Coalesce(opt.fill, fill)
link = langext.Coalesce(opt.link, link)
linkStr = langext.Coalesce(opt.linkStr, linkStr)
fontNameOverride = langext.CoalesceOpt(opt.fontNameOverride, fontNameOverride)
fontStyleOverride = langext.CoalesceOpt(opt.fontStyleOverride, fontStyleOverride)
fontSizeOverride = langext.CoalesceOpt(opt.fontSizeOverride, fontSizeOverride)
alphaOverride = langext.CoalesceOpt(opt.alphaOverride, alphaOverride)
extraLn = langext.Coalesce(opt.extraLn, extraLn)
x = langext.CoalesceOpt(opt.x, x)
autoWidth = langext.Coalesce(opt.autoWidth, autoWidth)
textColor = langext.CoalesceOpt(opt.textColor, textColor)
borderColor = langext.CoalesceOpt(opt.borderColor, borderColor)
fillColor = langext.CoalesceOpt(opt.fillColor, fillColor)
autoWidthPaddingX = langext.Coalesce(opt.autoWidthPaddingX, autoWidthPaddingX)
debug = langext.Coalesce(opt.debug, debug)
}
if fontNameOverride != nil || fontStyleOverride != nil || fontSizeOverride != nil {
oldFontName := b.fontName
oldFontStyle := b.fontStyle
oldFontSize := b.fontSize
newFontName := langext.Coalesce(fontNameOverride, oldFontName)
newFontStyle := langext.Coalesce(fontStyleOverride, oldFontStyle)
newFontSize := langext.Coalesce(fontSizeOverride, oldFontSize)
b.SetFont(newFontName, newFontStyle, newFontSize)
defer func() { b.SetFont(oldFontName, oldFontStyle, oldFontSize) }()
}
if height == nil {
// (computed after SetFont, so that b.cellHeight already reflects the font override)
height = langext.Ptr(b.cellHeight + b.cellSpacing)
}
if textColor != nil {
oldColorR, oldColorG, oldColorB := b.b.GetTextColor()
b.SetTextColor(textColor.R, textColor.G, textColor.B)
defer func() { b.SetTextColor(oldColorR, oldColorG, oldColorB) }()
}
if borderColor != nil {
oldColorR, oldColorG, oldColorB := b.b.GetDrawColor()
b.SetDrawColor(borderColor.R, borderColor.G, borderColor.B)
defer func() { b.SetDrawColor(oldColorR, oldColorG, oldColorB) }()
}
if fillColor != nil {
oldColorR, oldColorG, oldColorB := b.b.GetFillColor()
b.SetFillColor(fillColor.R, fillColor.G, fillColor.B)
defer func() { b.SetFillColor(oldColorR, oldColorG, oldColorB) }()
}
if alphaOverride != nil {
oldA, oldBMS := b.b.GetAlpha()
b.b.SetAlpha(alphaOverride.V1, string(alphaOverride.V2))
defer func() { b.b.SetAlpha(oldA, oldBMS) }()
}
if x != nil {
b.b.SetX(*x)
}
if autoWidth {
width = b.GetStringWidth(txtTR, langext.ArrDeRef(opts)...) + autoWidthPaddingX
}
xBefore, yBefore := b.b.GetXY()
b.b.CellFormat(width, *height, txtTR, string(border), int(ln), string(align), fill, link, linkStr)
if debug {
if ln == BreakToNextLine {
b.Rect(b.GetPageWidth()-xBefore-b.GetMarginRight(), *height, RectOutline, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).DrawColor(0, 128, 0))
} else if ln == BreakToRight {
b.Rect(b.GetX()-xBefore, *height, RectOutline, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).DrawColor(0, 128, 0))
} else if ln == BreakToBelow {
b.Rect(b.GetPageWidth()-xBefore-b.GetMarginRight(), *height, RectOutline, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).DrawColor(0, 128, 0))
}
}
if extraLn != 0 {
b.b.Ln(extraLn)
}
}
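Not part of the diff itself: a minimal usage sketch of the Cell builder options above, assuming the import path gogs.mikescher.com/BlackForestBytes/goext/wpdf (inferred from the other files in this changeset) and a *wpdf.WPDFBuilder obtained elsewhere; only option methods visible in this file are used.

package example

import "gogs.mikescher.com/BlackForestBytes/goext/wpdf" // assumed import path

// renderCells is a hypothetical helper showing the fluent PDFCellOpt builder.
func renderCells(b *wpdf.WPDFBuilder) {
	// plain cell with all defaults (breaks to the next line afterwards)
	b.Cell("Invoice")

	// styled cell: italic, fixed x position, width derived from the text plus padding,
	// grey text, and an extra line break after the cell
	b.Cell("Customer No. 1234",
		wpdf.NewPDFCellOpt().
			Italic().
			X(25).
			AutoWidth().
			AutoWidthPaddingX(2).
			TextColorHex(0x333333).
			LnAfter(4))
}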

wpdf/wpdfConstants.go Normal file

@@ -0,0 +1,109 @@
package wpdf
type PDFOrientation string
const (
Portrait PDFOrientation = "P"
Landscape PDFOrientation = "L"
)
type PDFSize string
const (
SizeA3 PDFSize = "A3"
SizeA4 PDFSize = "A4"
SizeA5 PDFSize = "A4"
SizeLetter PDFSize = "Letter"
SizeLegal PDFSize = "Legal"
SizeTabloid PDFSize = "Tabloid"
)
type PDFFontFamily string
const (
FontCourier PDFFontFamily = "courier"
FontHelvetica PDFFontFamily = "helvetica"
FontTimes PDFFontFamily = "times"
FontZapfDingbats PDFFontFamily = "zapfdingbats"
FontSymbol PDFFontFamily = "symbol"
)
type PDFFontStyle string
const (
Normal PDFFontStyle = ""
Bold PDFFontStyle = "B"
Italic PDFFontStyle = "I"
BoldItalic PDFFontStyle = "IB"
)
type PDFBorder string
const (
BorderNone PDFBorder = ""
BorderFull PDFBorder = "1"
BorderLeft PDFBorder = "L"
BorderTop PDFBorder = "T"
BorderRight PDFBorder = "R"
BorderBottom PDFBorder = "B"
BorderTLR PDFBorder = "TLR"
BorderLR PDFBorder = "LR"
)
type PDFTextBreak int
const (
BreakToRight PDFTextBreak = 0
BreakToNextLine PDFTextBreak = 1
BreakToBelow PDFTextBreak = 2
)
type PDFTextAlign string
const (
AlignLeft PDFTextAlign = "L"
AlignHorzCenter PDFTextAlign = "C"
AlignRight PDFTextAlign = "R"
)
type PDFRectStyle string
const (
RectFill PDFRectStyle = "F"
RectOutline PDFRectStyle = "D"
RectFillOutline PDFRectStyle = "FD"
)
type PDFBlendMode string
const (
BlendNormal PDFBlendMode = "Normal"
BlendMultiply PDFBlendMode = "Multiply"
BlendScreen PDFBlendMode = "Screen"
BlendOverlay PDFBlendMode = "Overlay"
BlendDarken PDFBlendMode = "Darken"
BlendLighten PDFBlendMode = "Lighten"
BlendColorDodge PDFBlendMode = "ColorDodge"
BlendColorBurn PDFBlendMode = "ColorBurn"
BlendHardLight PDFBlendMode = "HardLight"
BlendSoftLight PDFBlendMode = "SoftLight"
BlendDifference PDFBlendMode = "Difference"
BlendExclusion PDFBlendMode = "Exclusion"
BlendHue PDFBlendMode = "Hue"
BlendSaturation PDFBlendMode = "Saturation"
BlendColor PDFBlendMode = "Color"
BlendLuminosity PDFBlendMode = "Luminosity"
)
type PDFLineCapStyle string
const (
CapButt PDFLineCapStyle = "butt"
CapRound PDFLineCapStyle = "round"
CapSquare PDFLineCapStyle = "square"
)
const (
BackgroundFill = true
BackgroundTransparent = false
)

wpdf/wpdfImage.go Normal file

@@ -0,0 +1,372 @@
package wpdf
import (
"bytes"
"github.com/jung-kurt/gofpdf"
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/imageext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"image"
"image/color"
"image/draw"
"net/http"
)
type PDFImageRef struct {
Info *gofpdf.ImageInfoType
Name string
Bin []byte
Image *image.Image
Mime string
}
type PDFImageRegisterOpt struct {
imageType *string
readDpi *bool
allowNegativePosition *bool
name *string
}
func NewPDFImageRegisterOpt() *PDFImageRegisterOpt {
return &PDFImageRegisterOpt{}
}
func (opt *PDFImageRegisterOpt) ImageType(v string) *PDFImageRegisterOpt {
opt.imageType = &v
return opt
}
func (opt *PDFImageRegisterOpt) ReadDpi(v bool) *PDFImageRegisterOpt {
opt.readDpi = &v
return opt
}
func (opt *PDFImageRegisterOpt) AllowNegativePosition(v bool) *PDFImageRegisterOpt {
opt.allowNegativePosition = &v
return opt
}
func (opt *PDFImageRegisterOpt) Name(v string) *PDFImageRegisterOpt {
opt.name = &v
return opt
}
func (b *WPDFBuilder) RegisterImage(bin []byte, opts ...*PDFImageRegisterOpt) *PDFImageRef {
imgName := "fpdf_img_" + langext.MustRawHexUUID()
imageType := ""
readDpi := false
allowNegativePosition := false
mime := "application/octet-stream"
for _, opt := range opts {
imageType = langext.Coalesce(opt.imageType, imageType)
readDpi = langext.Coalesce(opt.readDpi, readDpi)
allowNegativePosition = langext.Coalesce(opt.allowNegativePosition, allowNegativePosition)
imgName = langext.Coalesce(opt.name, imgName)
}
if imageType == "" {
ct := ""
if len(bin) > 512 {
ct = http.DetectContentType(bin[:512])
} else {
ct = http.DetectContentType(bin)
}
switch ct {
case "image/jpg":
imageType = "JPG"
mime = ct
case "image/jpeg":
imageType = "JPEG"
mime = ct
case "image/png":
imageType = "PNG"
mime = ct
case "image/gif":
imageType = "GIF"
mime = ct
}
} else {
switch imageType {
case "JPG":
case "JPEG":
mime = "image/jpeg"
case "PNG":
mime = "image/png"
case "GIF":
mime = "image/gif"
}
}
options := gofpdf.ImageOptions{
ImageType: imageType,
ReadDpi: readDpi,
AllowNegativePosition: allowNegativePosition,
}
info := b.b.RegisterImageOptionsReader(imgName, options, bytes.NewReader(bin))
return &PDFImageRef{
Name: imgName,
Info: info,
Bin: bin,
Image: nil,
Mime: mime,
}
}
type PDFImageOpt struct {
x *float64
y *float64
width *float64
height *float64
flow *bool
link *int
linkStr *string
imageType *string
readDpi *bool
allowNegativePosition *bool
imageFit *imageext.ImageFit
fillColor *color.Color
compression *imageext.ImageCompresson
reEncodePixelPerMM *float64
crop *imageext.ImageCrop
alphaOverride *dataext.Tuple[float64, PDFBlendMode]
debug *bool
}
func NewPDFImageOpt() *PDFImageOpt {
return &PDFImageOpt{}
}
func (opt *PDFImageOpt) X(v float64) *PDFImageOpt {
opt.x = &v
return opt
}
func (opt *PDFImageOpt) Y(v float64) *PDFImageOpt {
opt.y = &v
return opt
}
func (opt *PDFImageOpt) Width(v float64) *PDFImageOpt {
opt.width = &v
return opt
}
func (opt *PDFImageOpt) Height(v float64) *PDFImageOpt {
opt.height = &v
return opt
}
func (opt *PDFImageOpt) Debug(v bool) *PDFImageOpt {
opt.debug = &v
return opt
}
func (opt *PDFImageOpt) Flow(v bool) *PDFImageOpt {
opt.flow = &v
return opt
}
func (opt *PDFImageOpt) Link(v int) *PDFImageOpt {
opt.link = &v
return opt
}
func (opt *PDFImageOpt) LinkStr(v string) *PDFImageOpt {
opt.linkStr = &v
return opt
}
func (opt *PDFImageOpt) ImageType(v string) *PDFImageOpt {
opt.imageType = &v
return opt
}
func (opt *PDFImageOpt) ReadDpi(v bool) *PDFImageOpt {
opt.readDpi = &v
return opt
}
func (opt *PDFImageOpt) AllowNegativePosition(v bool) *PDFImageOpt {
opt.allowNegativePosition = &v
return opt
}
func (opt *PDFImageOpt) ImageFit(v imageext.ImageFit) *PDFImageOpt {
opt.imageFit = &v
return opt
}
func (opt *PDFImageOpt) FillColor(v color.Color) *PDFImageOpt {
opt.fillColor = &v
return opt
}
func (opt *PDFImageOpt) Compression(v imageext.ImageCompresson) *PDFImageOpt {
opt.compression = &v
return opt
}
func (opt *PDFImageOpt) ReEncodePixelPerMM(v float64) *PDFImageOpt {
opt.reEncodePixelPerMM = &v
return opt
}
func (opt *PDFImageOpt) Crop(cropX float64, cropY float64, cropWidth float64, cropHeight float64) *PDFImageOpt {
opt.crop = &imageext.ImageCrop{
CropX: cropX,
CropY: cropY,
CropWidth: cropWidth,
CropHeight: cropHeight,
}
return opt
}
func (opt *PDFImageOpt) Alpha(alpha float64, blendMode PDFBlendMode) *PDFImageOpt {
opt.alphaOverride = &dataext.Tuple[float64, PDFBlendMode]{V1: alpha, V2: blendMode}
return opt
}
func (b *WPDFBuilder) Image(img *PDFImageRef, opts ...*PDFImageOpt) {
var err error
x := b.GetX()
y := b.GetY()
w := img.Info.Width()
h := img.Info.Height()
flow := true
link := 0
linkStr := ""
imageType := ""
readDpi := false
allowNegativePosition := false
reEncodePixelPerMM := 15.0
var imageFit *imageext.ImageFit = nil
var fillColor color.Color = color.Transparent
compression := imageext.CompressionPNGSpeed
debug := b.debug
var crop *imageext.ImageCrop = nil
var alphaOverride *dataext.Tuple[float64, PDFBlendMode]
for _, opt := range opts {
x = langext.Coalesce(opt.x, x)
y = langext.Coalesce(opt.y, y)
w = langext.Coalesce(opt.width, w)
h = langext.Coalesce(opt.height, h)
flow = langext.Coalesce(opt.flow, flow)
link = langext.Coalesce(opt.link, link)
linkStr = langext.Coalesce(opt.linkStr, linkStr)
imageType = langext.Coalesce(opt.imageType, imageType)
readDpi = langext.Coalesce(opt.readDpi, readDpi)
allowNegativePosition = langext.Coalesce(opt.allowNegativePosition, allowNegativePosition)
imageFit = langext.CoalesceOpt(opt.imageFit, imageFit)
fillColor = langext.Coalesce(opt.fillColor, fillColor)
compression = langext.Coalesce(opt.compression, compression)
reEncodePixelPerMM = langext.Coalesce(opt.reEncodePixelPerMM, reEncodePixelPerMM)
crop = langext.CoalesceOpt(opt.crop, crop)
debug = langext.Coalesce(opt.debug, debug)
alphaOverride = langext.CoalesceOpt(opt.alphaOverride, alphaOverride)
}
if flow {
y = b.GetY()
}
regName := img.Name
var subImageBounds *imageext.PercentageRectangle = nil
if imageFit != nil || fillColor != nil || crop != nil {
var dataimg image.Image
if img.Image != nil {
dataimg = *img.Image
} else {
dataimg, err = imageext.VerifyAndDecodeImage(bytes.NewReader(img.Bin), img.Mime)
if err != nil {
b.b.SetError(err)
return
}
}
if crop != nil {
dataimg, err = imageext.CropImage(dataimg, crop.CropX, crop.CropY, crop.CropWidth, crop.CropHeight)
if err != nil {
b.b.SetError(err)
return
}
}
if imageFit != nil {
pdfPixelPerMillimeter := 15.0
pxw := w * pdfPixelPerMillimeter
pxh := h * pdfPixelPerMillimeter
var dataImgRect imageext.PercentageRectangle
dataimg, dataImgRect, err = imageext.ObjectFitImage(dataimg, pxw, pxh, *imageFit, fillColor)
if err != nil {
b.b.SetError(err)
return
}
subImageBounds = &dataImgRect
}
if dataimg.ColorModel() != color.RGBAModel && dataimg.ColorModel() != color.NRGBAModel {
// the image cannot be 16bpp or similar - otherwise fpdf errors out
dataImgRGBA := image.NewNRGBA(image.Rect(0, 0, dataimg.Bounds().Dx(), dataimg.Bounds().Dy()))
draw.Draw(dataImgRGBA, dataImgRGBA.Bounds(), dataimg, dataimg.Bounds().Min, draw.Src)
dataimg = dataImgRGBA
}
bfr, imgMime, err := imageext.EncodeImage(dataimg, compression)
if err != nil {
b.b.SetError(err)
return
}
regName = regName + "_" + langext.MustRawHexUUID()
switch imgMime {
case "image/jpeg":
imageType = "JPEG"
case "image/png":
imageType = "PNG"
case "image/gif":
imageType = "GIF"
}
b.b.RegisterImageOptionsReader(regName, gofpdf.ImageOptions{ImageType: imageType}, &bfr)
}
if alphaOverride != nil {
oldA, oldBMS := b.b.GetAlpha()
b.b.SetAlpha(alphaOverride.V1, string(alphaOverride.V2))
defer func() { b.b.SetAlpha(oldA, oldBMS) }()
}
fpdfOpt := gofpdf.ImageOptions{
ImageType: imageType,
ReadDpi: readDpi,
AllowNegativePosition: allowNegativePosition,
}
b.b.ImageOptions(regName, x, y, w, h, flow, fpdfOpt, link, linkStr)
if debug {
b.Rect(w, h, RectOutline, NewPDFRectOpt().X(x).Y(y).LineWidth(0.25).DrawColor(255, 0, 0))
if subImageBounds != nil {
r := subImageBounds.Of(imageext.Rectangle{X: x, Y: y, W: w, H: h})
b.Rect(r.W, r.H, RectOutline, NewPDFRectOpt().X(r.X).Y(r.Y).LineWidth(0.25).DrawColor(255, 0, 0))
b.Rect(r.W, r.H, RectFill, NewPDFRectOpt().X(r.X).Y(r.Y).FillColor(255, 0, 0).Alpha(0.2, BlendNormal))
b.Line(r.X, r.Y, r.X+r.W, r.Y+r.H, NewPDFLineOpt().LineWidth(0.25).DrawColor(255, 0, 0))
b.Line(r.X+r.W, r.Y, r.X, r.Y+r.H, NewPDFLineOpt().LineWidth(0.25).DrawColor(255, 0, 0))
}
}
}
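Not part of the diff itself: a minimal sketch of RegisterImage plus Image, assuming the import path gogs.mikescher.com/BlackForestBytes/goext/wpdf and a builder obtained elsewhere; the file name is a placeholder, and Flow(false) is used so that the Y option takes effect (see the flow handling above).

package example

import (
	"os"

	"gogs.mikescher.com/BlackForestBytes/goext/wpdf" // assumed import path
)

// placeLogo is a hypothetical helper: register the raw bytes once, then draw the image.
func placeLogo(b *wpdf.WPDFBuilder) error {
	bin, err := os.ReadFile("logo.png") // placeholder asset
	if err != nil {
		return err
	}

	// image type and mime are auto-detected from the bytes (see RegisterImage above)
	ref := b.RegisterImage(bin)

	// draw at a fixed position, 40x20 in document units, without advancing the flow cursor
	b.Image(ref, wpdf.NewPDFImageOpt().
		Flow(false).
		X(10).Y(10).
		Width(40).Height(20))

	return nil
}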

wpdf/wpdfLine.go Normal file

@@ -0,0 +1,96 @@
package wpdf
import (
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type PDFLineOpt struct {
lineWidth *float64
drawColor *PDFColor
alpha *dataext.Tuple[float64, PDFBlendMode]
capStyle *PDFLineCapStyle
debug *bool
}
func NewPDFLineOpt() *PDFLineOpt {
return &PDFLineOpt{}
}
func (opt *PDFLineOpt) LineWidth(v float64) *PDFLineOpt {
opt.lineWidth = &v
return opt
}
func (opt *PDFLineOpt) DrawColor(cr, cg, cb int) *PDFLineOpt {
opt.drawColor = langext.Ptr(rgbToColor(cr, cg, cb))
return opt
}
func (opt *PDFLineOpt) DrawColorHex(c uint32) *PDFLineOpt {
opt.drawColor = langext.Ptr(hexToColor(c))
return opt
}
func (opt *PDFLineOpt) Alpha(alpha float64, blendMode PDFBlendMode) *PDFLineOpt {
opt.alpha = &dataext.Tuple[float64, PDFBlendMode]{V1: alpha, V2: blendMode}
return opt
}
func (opt *PDFLineOpt) CapButt() *PDFLineOpt {
opt.capStyle = langext.Ptr(CapButt)
return opt
}
func (opt *PDFLineOpt) CapSquare() *PDFLineOpt {
opt.capStyle = langext.Ptr(CapSquare)
return opt
}
func (opt *PDFLineOpt) CapRound() *PDFLineOpt {
opt.capStyle = langext.Ptr(CapRound)
return opt
}
func (opt *PDFLineOpt) Debug(v bool) *PDFLineOpt {
opt.debug = &v
return opt
}
func (b *WPDFBuilder) Line(x1 float64, y1 float64, x2 float64, y2 float64, opts ...*PDFLineOpt) {
var lineWidth *float64
var drawColor *PDFColor
var alphaOverride *dataext.Tuple[float64, PDFBlendMode]
capStyle := CapButt
debug := b.debug
for _, opt := range opts {
lineWidth = langext.CoalesceOpt(opt.lineWidth, lineWidth)
drawColor = langext.CoalesceOpt(opt.drawColor, drawColor)
alphaOverride = langext.CoalesceOpt(opt.alpha, alphaOverride)
capStyle = langext.Coalesce(opt.capStyle, capStyle)
debug = langext.Coalesce(opt.debug, debug)
}
if lineWidth != nil {
old := b.GetLineWidth()
b.SetLineWidth(*lineWidth)
defer func() { b.SetLineWidth(old) }()
}
if drawColor != nil {
oldR, oldG, oldB := b.GetDrawColor()
b.SetDrawColor(drawColor.R, drawColor.G, drawColor.B)
defer func() { b.SetDrawColor(oldR, oldG, oldB) }()
}
if alphaOverride != nil {
oldA, oldBMS := b.b.GetAlpha()
b.b.SetAlpha(alphaOverride.V1, string(alphaOverride.V2))
defer func() { b.b.SetAlpha(oldA, oldBMS) }()
}
b.b.SetLineCapStyle(string(capStyle))
b.b.Line(x1, y1, x2, y2)
}
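Not part of the diff itself: a short sketch of Line, assuming the same import path and a builder obtained elsewhere; it draws a thin grey horizontal rule across the content area using only helpers that appear in this changeset (GetY, GetPageWidth, GetMarginRight).

package example

import "gogs.mikescher.com/BlackForestBytes/goext/wpdf" // assumed import path

// separator is a hypothetical helper drawing a horizontal rule at the current Y position.
func separator(b *wpdf.WPDFBuilder) {
	y := b.GetY()
	b.Line(10, y, b.GetPageWidth()-b.GetMarginRight(), y,
		wpdf.NewPDFLineOpt().
			LineWidth(0.2).
			DrawColorHex(0xCCCCCC).
			CapRound())
}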

wpdf/wpdfMultiCell.go Normal file

@@ -0,0 +1,230 @@
package wpdf
import (
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type PDFMultiCellOpt struct {
width *float64
height *float64
border *PDFBorder
align *PDFTextAlign
fill *bool
fontNameOverride *PDFFontFamily
fontStyleOverride *PDFFontStyle
fontSizeOverride *float64
alphaOverride *dataext.Tuple[float64, PDFBlendMode]
extraLn *float64
x *float64
textColor *PDFColor
borderColor *PDFColor
fillColor *PDFColor
debug *bool
}
func NewPDFMultiCellOpt() *PDFMultiCellOpt {
return &PDFMultiCellOpt{}
}
func (opt *PDFMultiCellOpt) Width(v float64) *PDFMultiCellOpt {
opt.width = &v
return opt
}
func (opt *PDFMultiCellOpt) Height(v float64) *PDFMultiCellOpt {
opt.height = &v
return opt
}
func (opt *PDFMultiCellOpt) Border(v PDFBorder) *PDFMultiCellOpt {
opt.border = &v
return opt
}
func (opt *PDFMultiCellOpt) Align(v PDFTextAlign) *PDFMultiCellOpt {
opt.align = &v
return opt
}
func (opt *PDFMultiCellOpt) FillBackground(v bool) *PDFMultiCellOpt {
opt.fill = &v
return opt
}
func (opt *PDFMultiCellOpt) Font(fontName PDFFontFamily, fontStyle PDFFontStyle, fontSize float64) *PDFMultiCellOpt {
opt.fontNameOverride = &fontName
opt.fontStyleOverride = &fontStyle
opt.fontSizeOverride = &fontSize
return opt
}
func (opt *PDFMultiCellOpt) FontName(v PDFFontFamily) *PDFMultiCellOpt {
opt.fontNameOverride = &v
return opt
}
func (opt *PDFMultiCellOpt) FontStyle(v PDFFontStyle) *PDFMultiCellOpt {
opt.fontStyleOverride = &v
return opt
}
func (opt *PDFMultiCellOpt) FontSize(v float64) *PDFMultiCellOpt {
opt.fontSizeOverride = &v
return opt
}
func (opt *PDFMultiCellOpt) Bold() *PDFMultiCellOpt {
opt.fontStyleOverride = langext.Ptr(Bold)
return opt
}
func (opt *PDFMultiCellOpt) Italic() *PDFMultiCellOpt {
opt.fontStyleOverride = langext.Ptr(Italic)
return opt
}
func (opt *PDFMultiCellOpt) LnAfter(v float64) *PDFMultiCellOpt {
opt.extraLn = &v
return opt
}
func (opt *PDFMultiCellOpt) X(v float64) *PDFMultiCellOpt {
opt.x = &v
return opt
}
func (opt *PDFMultiCellOpt) TextColor(cr, cg, cb int) *PDFMultiCellOpt {
opt.textColor = langext.Ptr(rgbToColor(cr, cg, cb))
return opt
}
func (opt *PDFMultiCellOpt) TextColorHex(c uint32) *PDFMultiCellOpt {
opt.textColor = langext.Ptr(hexToColor(c))
return opt
}
func (opt *PDFMultiCellOpt) BorderColor(cr, cg, cb int) *PDFMultiCellOpt {
opt.borderColor = langext.Ptr(rgbToColor(cr, cg, cb))
return opt
}
func (opt *PDFMultiCellOpt) BorderColorHex(c uint32) *PDFMultiCellOpt {
opt.borderColor = langext.Ptr(hexToColor(c))
return opt
}
func (opt *PDFMultiCellOpt) FillColor(cr, cg, cb int) *PDFMultiCellOpt {
opt.fillColor = langext.Ptr(rgbToColor(cr, cg, cb))
return opt
}
func (opt *PDFMultiCellOpt) FillColorHex(c uint32) *PDFMultiCellOpt {
opt.fillColor = langext.Ptr(hexToColor(c))
return opt
}
func (opt *PDFMultiCellOpt) Alpha(alpha float64, blendMode PDFBlendMode) *PDFMultiCellOpt {
opt.alphaOverride = &dataext.Tuple[float64, PDFBlendMode]{V1: alpha, V2: blendMode}
return opt
}
func (opt *PDFMultiCellOpt) Debug(v bool) *PDFMultiCellOpt {
opt.debug = &v
return opt
}
func (opt *PDFMultiCellOpt) Copy() *PDFMultiCellOpt {
c := *opt
return &c
}
func (b *WPDFBuilder) MultiCell(txt string, opts ...*PDFMultiCellOpt) {
txtTR := b.tr(txt)
width := float64(0)
height := b.cellHeight + b.cellSpacing
border := BorderNone
align := AlignLeft
fill := false
var fontNameOverride *PDFFontFamily
var fontStyleOverride *PDFFontStyle
var fontSizeOverride *float64
var alphaOverride *dataext.Tuple[float64, PDFBlendMode]
extraLn := float64(0)
var x *float64
var textColor *PDFColor
var borderColor *PDFColor
var fillColor *PDFColor
debug := b.debug
for _, opt := range opts {
width = langext.Coalesce(opt.width, width)
height = langext.Coalesce(opt.height, height)
border = langext.Coalesce(opt.border, border)
align = langext.Coalesce(opt.align, align)
fill = langext.Coalesce(opt.fill, fill)
fontNameOverride = langext.CoalesceOpt(opt.fontNameOverride, fontNameOverride)
fontStyleOverride = langext.CoalesceOpt(opt.fontStyleOverride, fontStyleOverride)
fontSizeOverride = langext.CoalesceOpt(opt.fontSizeOverride, fontSizeOverride)
alphaOverride = langext.CoalesceOpt(opt.alphaOverride, alphaOverride)
extraLn = langext.Coalesce(opt.extraLn, extraLn)
x = langext.CoalesceOpt(opt.x, x)
textColor = langext.CoalesceOpt(opt.textColor, textColor)
borderColor = langext.CoalesceOpt(opt.borderColor, borderColor)
fillColor = langext.CoalesceOpt(opt.fillColor, fillColor)
debug = langext.Coalesce(opt.debug, debug)
}
if fontNameOverride != nil || fontStyleOverride != nil || fontSizeOverride != nil {
oldFontName := b.fontName
oldFontStyle := b.fontStyle
oldFontSize := b.fontSize
newFontName := langext.Coalesce(fontNameOverride, oldFontName)
newFontStyle := langext.Coalesce(fontStyleOverride, oldFontStyle)
newFontSize := langext.Coalesce(fontSizeOverride, oldFontSize)
b.SetFont(newFontName, newFontStyle, newFontSize)
defer func() { b.SetFont(oldFontName, oldFontStyle, oldFontSize) }()
}
if textColor != nil {
oldColorR, oldColorG, oldColorB := b.b.GetTextColor()
b.SetTextColor(textColor.R, textColor.G, textColor.B)
defer func() { b.SetTextColor(oldColorR, oldColorG, oldColorB) }()
}
if borderColor != nil {
oldColorR, oldColorG, oldColorB := b.b.GetDrawColor()
b.SetDrawColor(borderColor.R, borderColor.G, borderColor.B)
defer func() { b.SetDrawColor(oldColorR, oldColorG, oldColorB) }()
}
if fillColor != nil {
oldColorR, oldColorG, oldColorB := b.b.GetFillColor()
b.SetFillColor(fillColor.R, fillColor.G, fillColor.B)
defer func() { b.SetFillColor(oldColorR, oldColorG, oldColorB) }()
}
if alphaOverride != nil {
oldA, oldBMS := b.b.GetAlpha()
b.b.SetAlpha(alphaOverride.V1, string(alphaOverride.V2))
defer func() { b.b.SetAlpha(oldA, oldBMS) }()
}
if x != nil {
b.b.SetX(*x)
}
xBefore, yBefore := b.b.GetXY()
b.b.MultiCell(width, height, txtTR, string(border), string(align), fill)
if debug {
b.Rect(b.GetPageWidth()-xBefore-b.GetMarginRight(), b.GetY()-yBefore, RectOutline, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).DrawColor(0, 128, 0))
}
if extraLn != 0 {
b.b.Ln(extraLn)
}
}
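Not part of the diff itself: a minimal sketch of MultiCell, assuming the same import path and a builder obtained elsewhere; unlike Cell, MultiCell wraps the text over multiple lines.

package example

import "gogs.mikescher.com/BlackForestBytes/goext/wpdf" // assumed import path

// addressBlock is a hypothetical helper rendering a bordered, filled, wrapped text block.
func addressBlock(b *wpdf.WPDFBuilder) {
	b.MultiCell("ACME Corp.\nExample Street 1\n12345 Example City",
		wpdf.NewPDFMultiCellOpt().
			Width(80).
			Border(wpdf.BorderFull).
			FillBackground(true).
			FillColorHex(0xF7F7F7).
			LnAfter(4))
}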

wpdf/wpdfRect.go Normal file

@@ -0,0 +1,151 @@
package wpdf
import (
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type PDFRectOpt struct {
x *float64
y *float64
lineWidth *float64
drawColor *PDFColor
fillColor *PDFColor
alpha *dataext.Tuple[float64, PDFBlendMode]
radiusTL *float64
radiusTR *float64
radiusBR *float64
radiusBL *float64
debug *bool
}
func NewPDFRectOpt() *PDFRectOpt {
return &PDFRectOpt{}
}
func (opt *PDFRectOpt) X(v float64) *PDFRectOpt {
opt.x = &v
return opt
}
func (opt *PDFRectOpt) Y(v float64) *PDFRectOpt {
opt.y = &v
return opt
}
func (opt *PDFRectOpt) LineWidth(v float64) *PDFRectOpt {
opt.lineWidth = &v
return opt
}
func (opt *PDFRectOpt) DrawColor(cr, cg, cb int) *PDFRectOpt {
opt.drawColor = langext.Ptr(rgbToColor(cr, cg, cb))
return opt
}
func (opt *PDFRectOpt) DrawColorHex(c uint32) *PDFRectOpt {
opt.drawColor = langext.Ptr(hexToColor(c))
return opt
}
func (opt *PDFRectOpt) FillColor(cr, cg, cb int) *PDFRectOpt {
opt.fillColor = langext.Ptr(rgbToColor(cr, cg, cb))
return opt
}
func (opt *PDFRectOpt) FillColorHex(c uint32) *PDFRectOpt {
opt.fillColor = langext.Ptr(hexToColor(c))
return opt
}
func (opt *PDFRectOpt) Rounded(radius float64) *PDFRectOpt {
opt.radiusTL = &radius
opt.radiusTR = &radius
opt.radiusBR = &radius
opt.radiusBL = &radius
return opt
}
func (opt *PDFRectOpt) RadiusTL(radius float64) *PDFRectOpt {
opt.radiusTL = &radius
return opt
}
func (opt *PDFRectOpt) RadiusTR(radius float64) *PDFRectOpt {
opt.radiusTR = &radius
return opt
}
func (opt *PDFRectOpt) RadiusBL(radius float64) *PDFRectOpt {
opt.radiusBL = &radius
return opt
}
func (opt *PDFRectOpt) RadiusBR(radius float64) *PDFRectOpt {
opt.radiusBR = &radius
return opt
}
func (opt *PDFRectOpt) Alpha(alpha float64, blendMode PDFBlendMode) *PDFRectOpt {
opt.alpha = &dataext.Tuple[float64, PDFBlendMode]{V1: alpha, V2: blendMode}
return opt
}
func (opt *PDFRectOpt) Debug(v bool) *PDFRectOpt {
opt.debug = &v
return opt
}
func (b *WPDFBuilder) Rect(w float64, h float64, styleStr PDFRectStyle, opts ...*PDFRectOpt) {
x := b.GetX()
y := b.GetY()
var lineWidth *float64
var drawColor *PDFColor
var fillColor *PDFColor
var alphaOverride *dataext.Tuple[float64, PDFBlendMode]
radiusTL := float64(0)
radiusTR := float64(0)
radiusBR := float64(0)
radiusBL := float64(0)
debug := b.debug
for _, opt := range opts {
x = langext.Coalesce(opt.x, x)
y = langext.Coalesce(opt.y, y)
lineWidth = langext.CoalesceOpt(opt.lineWidth, lineWidth)
drawColor = langext.CoalesceOpt(opt.drawColor, drawColor)
fillColor = langext.CoalesceOpt(opt.fillColor, fillColor)
alphaOverride = langext.CoalesceOpt(opt.alpha, alphaOverride)
radiusTL = langext.Coalesce(opt.radiusTL, radiusTL)
radiusTR = langext.Coalesce(opt.radiusTR, radiusTR)
radiusBR = langext.Coalesce(opt.radiusBR, radiusBR)
radiusBL = langext.Coalesce(opt.radiusBL, radiusBL)
debug = langext.Coalesce(opt.debug, debug)
}
if lineWidth != nil {
old := b.GetLineWidth()
b.SetLineWidth(*lineWidth)
defer func() { b.SetLineWidth(old) }()
}
if drawColor != nil {
oldR, oldG, oldB := b.GetDrawColor()
b.SetDrawColor(drawColor.R, drawColor.G, drawColor.B)
defer func() { b.SetDrawColor(oldR, oldG, oldB) }()
}
if fillColor != nil {
oldR, oldG, oldB := b.GetFillColor()
b.SetFillColor(fillColor.R, fillColor.G, fillColor.B)
defer func() { b.SetFillColor(oldR, oldG, oldB) }()
}
if alphaOverride != nil {
oldA, oldBMS := b.b.GetAlpha()
b.b.SetAlpha(alphaOverride.V1, string(alphaOverride.V2))
defer func() { b.b.SetAlpha(oldA, oldBMS) }()
}
b.b.RoundedRectExt(x, y, w, h, radiusTL, radiusTR, radiusBR, radiusBL, string(styleStr))
}
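Not part of the diff itself: a minimal sketch of Rect, assuming the same import path and a builder obtained elsewhere; it draws a rounded, semi-transparent highlight box at a fixed position.

package example

import "gogs.mikescher.com/BlackForestBytes/goext/wpdf" // assumed import path

// highlightBox is a hypothetical helper combining fill, outline, rounding and alpha.
func highlightBox(b *wpdf.WPDFBuilder) {
	b.Rect(60, 12, wpdf.RectFillOutline,
		wpdf.NewPDFRectOpt().
			X(20).Y(40).
			Rounded(2).
			LineWidth(0.3).
			DrawColorHex(0x444444).
			FillColorHex(0xFFEE99).
			Alpha(0.5, wpdf.BlendNormal))
}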

wpdf/wpdfTable.go Normal file

@@ -0,0 +1,341 @@
package wpdf
import (
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"regexp"
"strconv"
)
// Column specifier:
//
// - `{number}`: Use this amount of space
// - `auto`: Use the needed space for the content
// - `*` / `fr`: Use the remaining space, evenly distributed, shrink down to auto
// - `{num}fr` / `{num}*`: Use the remaining space, evenly distributed (weighted), shrink down to auto
//
// # TableBuilder
// - PadX/PadY: Padding between cells
// - DefaultStyle: Default style for cells
//
// # TableCellStyleOpt
// - MultiCell: Use wpdf.MultiCell() instead of wpdf.Cell() --> supports linebreaks
// - Ellipsize: Ellipsize text if too long
// - PaddingHorz: Additional horizontal padding inside the cell to space the text
// - PDFCellOpt: Normal styling options (possibly not all are supported, depending on MultiCell: true/false)
var regexTableColumnSpecFr = rext.W(regexp.MustCompile(`^(?P<num>[0-9]*)(fr|\*)$`))
type TableBuilder struct {
builder *WPDFBuilder
padx float64
pady float64
rows []tableRow
defaultCellStyle *TableCellStyleOpt
columnWidths *[]string
debug *bool
}
func (r tableRow) maxFontSize(defaultFontSize float64) float64 {
mfs := defaultFontSize
for _, cell := range r.cells {
if cell.Style.fontSizeOverride != nil {
mfs = max(mfs, *cell.Style.fontSizeOverride)
}
}
return mfs
}
func (b *TableBuilder) Widths(v ...string) *TableBuilder {
b.columnWidths = &v
return b
}
func (b *TableBuilder) DefaultStyle(s *TableCellStyleOpt) *TableBuilder {
b.defaultCellStyle = s
return b
}
func (b *TableBuilder) PadX(v float64) *TableBuilder {
b.padx = v
return b
}
func (b *TableBuilder) PadY(v float64) *TableBuilder {
b.pady = v
return b
}
func (b *TableBuilder) AddRow(cells ...TableCell) *TableBuilder {
b.rows = append(b.rows, tableRow{cells: cells})
return b
}
func (b *TableBuilder) AddRowWithStyle(style *TableCellStyleOpt, cells ...string) *TableBuilder {
tcels := make([]TableCell, 0, len(cells))
for _, cell := range cells {
tcels = append(tcels, TableCell{Content: cell, Style: *style})
}
b.rows = append(b.rows, tableRow{cells: tcels})
return b
}
func (b *TableBuilder) AddRowDefaultStyle(cells ...string) *TableBuilder {
tcels := make([]TableCell, 0, len(cells))
for _, cell := range cells {
tcels = append(tcels, TableCell{Content: cell, Style: langext.Coalesce(b.defaultCellStyle, TableCellStyleOpt{})})
}
b.rows = append(b.rows, tableRow{cells: tcels})
return b
}
func (b *TableBuilder) BuildRow() *TableRowBuilder {
return &TableRowBuilder{tabbuilder: b, cells: make([]TableCell, 0)}
}
func (b *TableBuilder) Build() {
builder := b.builder
debug := langext.Coalesce(b.debug, b.builder.debug)
if len(b.rows) == 0 {
return // nothing to do
}
_, pageHeight := builder.FPDF().GetPageSize()
pbEnabled, pbMargin := builder.FPDF().GetAutoPageBreak()
builder.FPDF().SetAutoPageBreak(false, 0) // manually handle pagebreak in tables
defer func() { builder.FPDF().SetAutoPageBreak(pbEnabled, pbMargin) }()
columnWidths := b.calculateColumns()
columnCount := len(columnWidths)
for i, dat := range b.rows {
if len(dat.cells) != columnCount {
builder.FPDF().SetError(exerr.New(exerr.TypeInternal, "data must have the same length as header").Int("idx", i).Build())
return
}
}
defaultFontSize, _ := builder.FPDF().GetFontSize()
for rowIdx, row := range b.rows {
nextY := builder.GetY()
for cellIdx, cell := range row.cells {
str := cell.Content
style := cell.Style
ellipsize := langext.Coalesce(style.ellipsize, true)
cellPaddingHorz := langext.Coalesce(style.paddingHorz, 2)
fillHeight := langext.Coalesce(style.fillHeight, false)
bx := builder.GetX()
by := builder.GetY()
cellWidth := columnWidths[cellIdx]
_ = fillHeight // TODO implement, but how? (cells with fillHeight=true should have a border of the full column height, even if another column is growing it, but we do not know the height beforehand ...)
if langext.Coalesce(style.multiCell, true) {
builder.MultiCell(str, style.PDFCellOpt.Copy().ToMulti().Width(cellWidth).Debug(debug))
} else {
if ellipsize {
if builder.GetStringWidth(str, style.PDFCellOpt) > (cellWidth - cellPaddingHorz) {
for builder.GetStringWidth(str+"...", style.PDFCellOpt) > (cellWidth-cellPaddingHorz) && len(str) > 0 {
str = str[:len(str)-1]
}
str += "..."
}
}
builder.Cell(str, style.PDFCellOpt.Copy().Width(cellWidth).Debug(debug))
}
nextY = max(nextY, builder.GetY())
builder.SetXY(bx+cellWidth+b.padx, by)
}
builder.SetY(nextY + b.pady)
if rowIdx < len(b.rows)-1 && pbEnabled && (builder.GetY()+b.rows[rowIdx+1].maxFontSize(defaultFontSize)) > (pageHeight-pbMargin) {
builder.FPDF().AddPage()
}
}
}
func (b *TableBuilder) calculateColumns() []float64 {
pageWidthTotal, _ := b.builder.FPDF().GetPageSize()
marginLeft, _, marginRight, _ := b.builder.FPDF().GetMargins()
pageWidth := pageWidthTotal - marginLeft - marginRight
columnDef := make([]string, 0)
if b.columnWidths != nil {
columnDef = *b.columnWidths
} else if len(b.rows) > 0 {
columnDef = make([]string, len(b.rows[0].cells))
for i := range columnDef {
columnDef[i] = "*"
}
} else {
return []float64{}
}
columnWidths := make([]float64, len(columnDef))
frColumnWidthCount := 0
frColumnWeights := make([]float64, len(columnDef))
remainingWidth := pageWidth - (float64(len(columnDef)-1) * b.padx)
autoWidths := make([]float64, len(columnDef))
for colIdx := range columnDef {
w := float64(0)
for _, row := range b.rows {
if len(row.cells) > colIdx {
w = max(w, b.builder.GetStringWidth(row.cells[colIdx].Content, row.cells[colIdx].Style.PDFCellOpt))
}
}
autoWidths[colIdx] = w
}
for colIdx, col := range columnDef {
maxPadHorz := float64(0)
minWidth := float64(0)
for _, row := range b.rows {
if len(row.cells) > colIdx {
ph := langext.Coalesce(row.cells[colIdx].Style.paddingHorz, 2)
mw := langext.Coalesce(row.cells[colIdx].Style.minWidth, 0)
minWidth = max(minWidth, ph+mw)
maxPadHorz = max(maxPadHorz, ph)
}
}
if col == "auto" {
w := max(autoWidths[colIdx]+maxPadHorz, minWidth)
columnWidths[colIdx] = w
remainingWidth -= w
} else if match, ok := regexTableColumnSpecFr.MatchFirst(col); ok {
if match.GroupByName("num").Value() == "" {
w := minWidth
frColumnWidthCount += 1
frColumnWeights[colIdx] = 1
columnWidths[colIdx] = w
remainingWidth -= w
} else {
w := minWidth
n, _ := strconv.Atoi(match.GroupByName("num").Value())
frColumnWidthCount += n
frColumnWeights[colIdx] = float64(n)
columnWidths[colIdx] = w
remainingWidth -= w
}
} else {
if w, err := strconv.ParseFloat(col, 64); err == nil {
w = max(w, minWidth)
columnWidths[colIdx] = w
remainingWidth -= w
} else {
b.builder.FPDF().SetError(exerr.New(exerr.TypeInternal, "invalid column width").Str("width", col).Build())
w = max(w, minWidth)
columnWidths[colIdx] = w
remainingWidth -= w
return nil
}
}
}
if remainingWidth < 0 {
// no remaining space to distribute
return columnWidths
}
{
rmSub := 0.0
for i := range columnDef {
if frColumnWeights[i] != 0 {
w := min(autoWidths[i], (remainingWidth/float64(frColumnWidthCount))*frColumnWeights[i])
rmSub += w - columnWidths[i]
columnWidths[i] = w
}
}
remainingWidth -= rmSub
}
if remainingWidth > 0.01 {
rmSub := 0.0
for i := range columnDef {
if frColumnWeights[i] != 0 {
addW := (remainingWidth / float64(frColumnWidthCount)) * frColumnWeights[i]
rmSub += addW
columnWidths[i] += addW
}
}
remainingWidth -= rmSub
}
return columnWidths
}
func (b *TableBuilder) RowCount() int {
return len(b.rows)
}
func (b *TableBuilder) Debug(v bool) *TableBuilder {
b.debug = &v
return b
}
func (b *WPDFBuilder) Table() *TableBuilder {
return &TableBuilder{
builder: b,
rows: make([]tableRow, 0),
pady: 2,
padx: 2,
defaultCellStyle: defaultTableStyle(),
}
}
func defaultTableStyle() *TableCellStyleOpt {
return &TableCellStyleOpt{
PDFCellOpt: *NewPDFCellOpt().
FontSize(float64(8)).
Border(BorderFull).
BorderColorHex(uint32(0x666666)).
FillColorHex(uint32(0xF0F0F0)).
TextColorHex(uint32(0x000000)).
FillBackground(true),
minWidth: langext.Ptr(float64(5)),
ellipsize: langext.PTrue,
multiCell: langext.PFalse,
}
}
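Not part of the diff itself: a minimal sketch of the TableBuilder, assuming the same import path and a builder obtained elsewhere; it illustrates the column specifier documented at the top of this file ("auto" sizes to content, a plain number is a fixed width, "*" takes a share of the remaining space) and relies on the default cell style set up by Table().

package example

import "gogs.mikescher.com/BlackForestBytes/goext/wpdf" // assumed import path

// invoiceTable is a hypothetical helper building a three-column table.
func invoiceTable(b *wpdf.WPDFBuilder) {
	tb := b.Table().
		Widths("auto", "*", "25").
		PadX(2).
		PadY(1)

	tb.AddRowDefaultStyle("Pos", "Description", "Price")
	tb.AddRowDefaultStyle("1", "Widget, blue", "9.99")
	tb.AddRowDefaultStyle("2", "Gadget, extra large, with a long description that may be ellipsized", "19.99")

	tb.Build()
}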

Some files were not shown because too many files have changed in this diff.