Compare commits

...

128 Commits

Author SHA1 Message Date
a7389f44fa v0.0.525 upgrade go1.22 -> go1.23
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m4s
2024-10-05 02:45:20 +02:00
69f0fedd66 v0.0.524
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m22s
2024-10-05 01:41:10 +02:00
335ef4d8e8 v0.0.523 ringbuffer
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m23s
2024-10-05 01:28:46 +02:00
61801ff20d v0.0.522
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m53s
2024-10-05 01:12:00 +02:00
361dca5c85 v0.0.521 ctxext
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m56s
2024-10-05 01:06:36 +02:00
9f85a243e8 v0.0.520
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m7s
2024-10-05 01:02:25 +02:00
dc6cb274ee v0.0.519
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-10-05 00:58:15 +02:00
f6b47792a4 v0.0.518 Improve sq db-listener interface (breaking)
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m11s
2024-10-05 00:45:55 +02:00
295b3ef793 v0.0.517 add constructor funcs for tuples
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m21s
2024-10-02 11:31:34 +02:00
721c176337 v0.0.516
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m25s
2024-09-25 21:43:41 +02:00
ebba6545a3 v0.0.515
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m35s
2024-09-16 17:39:51 +02:00
19c7e22ced v0.0.514 fix mongo filter where the primary sort key is null in db (fallback to secondary)
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-09-16 17:39:18 +02:00
9f883b458f v0.0.513
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m55s
2024-09-16 15:27:32 +02:00
1f456c5134 v0.0.512
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m6s
2024-09-15 21:25:21 +02:00
d7fbef37db v0.0.511
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m10s
2024-09-15 18:22:07 +02:00
a1668b6e5a v0.0.510
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m24s
2024-09-13 18:06:49 +02:00
3a17edfaf0 v0.0.509
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 6m2s
2024-08-26 14:35:49 +02:00
3320a9c19d v0.0.508
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m25s
2024-08-25 17:36:20 +02:00
8dcd8a270a v0.0.507 fix jsonfilter:"-" not working
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 7m7s
2024-08-25 15:41:17 +02:00
03a9b276d8 v0.0.506 allow empty-string as value for enum
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 7m36s
2024-08-22 11:45:02 +02:00
9c8cde384f v0.0.505
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 6m17s
2024-08-08 15:57:05 +02:00
99b000ecf4 v0.0.504
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m53s
2024-08-07 19:44:45 +02:00
a173e30090 v0.0.503
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m52s
2024-08-07 19:37:38 +02:00
a3481a7d2d v0.0.502
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-08-07 19:35:23 +02:00
a8e6f98a89 v0.0.501
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-08-07 19:31:36 +02:00
ab805403b9 v0.0.500
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-08-07 19:30:38 +02:00
1e98d351ce v0.0.499
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m24s
2024-08-07 18:34:22 +02:00
c40bdc8e9e v0.0.498
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m13s
2024-08-07 17:26:35 +02:00
7204562879 v0.0.497
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 2m33s
2024-08-07 17:04:59 +02:00
741611a2e1 v0.0.496 wpdf fixes and wpdf test.go
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m58s
2024-08-07 15:34:06 +02:00
133aeb8374 v0.0.495
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m44s
2024-08-07 14:00:02 +02:00
b78a468632 v0.0.494 add tables to wpdf
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-08-07 13:57:29 +02:00
f1b4480e0f v0.0.493 fix panic in RegisterImage for very short images
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m22s
2024-08-07 09:22:37 +02:00
ffffe4bf24 v0.0.492
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 5m32s
2024-08-02 16:19:21 +02:00
413bf3c848 v0.0.491 small optimization in Paginate method
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 8m31s
2024-07-31 00:15:09 +02:00
646990b549 v0.0.490 documentation and extra-params in exerr
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m2s
2024-07-27 23:44:18 +02:00
e5818146a8 v0.0.489
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m54s
2024-07-23 14:21:03 +02:00
1310054121 v0.0.488 fix wpdf with 16bpp images
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m9s
2024-07-22 15:16:28 +02:00
49d423915c v0.0.487
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m54s
2024-07-18 17:45:56 +02:00
1962cb3c52 v0.0.486 add ginext -> CorsAllowHeader
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m51s
2024-07-18 17:29:18 +02:00
84f124dd4d v0.0.485
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m44s
2024-07-16 15:22:18 +02:00
ff8e066135 v0.0.484
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m58s
2024-07-16 15:16:56 +02:00
bc5c61e43d v0.0.483
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m37s
2024-07-16 15:08:37 +02:00
6ded615723 v0.0.482 mathext.Percentile
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m46s
2024-07-12 16:33:42 +02:00
abc8af525a v0.0.481
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m50s
2024-07-04 16:24:49 +02:00
19d943361b v0.0.480
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m20s
2024-07-02 11:32:22 +02:00
b464afae01 v0.0.479 AccessStruct
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m36s
2024-07-02 11:29:47 +02:00
56bc5e8285 v0.0.478
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m41s
2024-07-01 17:23:00 +02:00
cb95bb561c v0.0.477 add langext.StrWrap
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m4s
2024-06-29 15:36:39 +02:00
dff8941bd3 v0.0.476 properly close cursor in wmo
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m44s
2024-06-28 18:37:02 +02:00
78e1c33e30 v0.0.475 ArrGroupBy
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m36s
2024-06-16 17:14:21 +02:00
d2f2a0558a v0.0.474 Add ZeroLogger config field to exerr.Init to override used zerolog instance
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m27s
2024-06-14 23:18:58 +02:00
fc4bed4b9f v0.0.473 add ctx to wmo.FilterQuery|Sort|Pagination
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m23s
2024-06-14 17:24:59 +02:00
julian
94a7bf250d v0.0.472 changed gin engine initialization
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m34s
2024-06-14 14:56:41 +02:00
f6121a6961 v0.0.471 Revert "v0.0.470 Add GoextJsonMarshaller interface to call when marshalling json via gojson"
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m50s
2024-06-11 19:39:43 +02:00
7fc73f1e93 v0.0.470 Add GoextJsonMarshaller interface to call when marshalling json via gojson
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m51s
2024-06-11 19:34:48 +02:00
2504ef00a0 v0.0.469
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m26s
2024-06-11 12:10:49 +02:00
fc5803493c added DblPtrIfNotNil
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m31s
2024-06-05 17:53:57 +02:00
a9295bfabf added CoalesceDblPtr
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m37s
2024-06-05 15:10:31 +02:00
12fa53d848 v0.0.466 exerr.Wrap now inherits the Severity of the wrapped error
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m24s
2024-06-03 13:48:30 +02:00
d2bb362135 v0.0.465
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m39s
2024-06-03 09:39:57 +02:00
9dd81f6bd5 v0.0.464 improve ZeroLogErrTraces/ZeroLogAllTraces output for empty-message wrapped exerrs
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m36s
2024-06-01 02:40:48 +02:00
d2c04afcd5 v0.0.463 Fix SubtractYears
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 3m29s
2024-05-29 20:20:01 +02:00
62980e1489 v0.0.462
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m32s
2024-05-23 14:37:05 +02:00
59963adf74 v0.0.461
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m17s
2024-05-20 00:52:49 +02:00
194ea4ace5 v0.0.460
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m16s
2024-05-20 00:38:04 +02:00
73b80a66bc v0.0.459
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m16s
2024-05-20 00:20:31 +02:00
d8b2d01274 v0.0.458 revert 457 and fix ObjectFitImage
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m49s
2024-05-20 00:15:24 +02:00
bfa8457e95 v0.0.457 test
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m38s
2024-05-20 00:07:33 +02:00
70106733d9 v0.0.456
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m38s
2024-05-18 23:38:47 +02:00
ce7837b9ef v0.0.455 add proper json/bson marshalling to exerr [severity|type|category]
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m39s
2024-05-16 15:38:42 +02:00
d0d72167eb v0.0.454
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m17s
2024-05-14 15:10:27 +02:00
a55ee1a6ce v0.0.453
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m18s
2024-05-14 14:57:10 +02:00
dfc319573c v0.0.452
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m16s
2024-05-14 12:48:43 +02:00
246e555f3f v0.0.451 wpdf image processing
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-05-14 12:46:49 +02:00
c28bc086b2 v0.0.450 wpdf
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 3m33s
2024-05-14 11:52:56 +02:00
d44e971325 v0.0.449
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m15s
2024-05-12 16:51:52 +02:00
fe4cdc48af v0.0.448 wmo marshalHook
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 25s
2024-05-12 16:45:45 +02:00
631006a4e1 v0.0.447
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m18s
2024-05-10 21:33:01 +02:00
567ead8697 v0.0.446 syncMap.GetAndSetIfNotContains and CASMutex
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-05-10 21:31:36 +02:00
e4886b4a7d v0.0.445 added CtxData() and ExtendGinMeta/ExtendContextMeta to exerr
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m55s
2024-05-03 15:28:53 +02:00
dcb5d3d7cd v0.0.444 change gin values in exerr auto meta to not include dots in keys (fucks up mongo)
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 3m9s
2024-05-03 13:24:08 +02:00
15a639f85a v0.0.443 fix wmo.List with pageSize==0
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 3m9s
2024-05-03 11:56:29 +02:00
303bd04649 v0.0.442
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m0s
2024-04-29 17:24:10 +02:00
7bda674939 v0.0.441
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 2m29s
2024-04-29 17:19:55 +02:00
126d4fbd0b v0.0.440 improve exerr.toJson
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 2m54s
2024-04-29 16:03:58 +02:00
fed8bccaab v0.0.439
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m38s
2024-04-25 11:47:16 +02:00
47b6a6b508 v0.0.438
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m20s
2024-04-25 11:40:01 +02:00
764ce79a71 v0.0.437 properly handle $group in wmo extraModPipeline
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 3m23s
2024-04-23 16:12:17 +02:00
b876c64ba2 v0.0.436
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m56s
2024-04-18 14:09:26 +02:00
8d52b41f57 v0.0.435 add ConvertStructToMapOpt.MaxDepth
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m39s
2024-04-15 12:55:44 +02:00
f47e2a33fe v0.0.434
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m13s
2024-04-15 10:43:26 +02:00
9321938dad v0.0.433 fix exerr missing gindata when using ginext.Error and add config for Output logging to stderr
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 4m0s
2024-04-15 10:25:30 +02:00
3828d601a2 v0.0.432 better handling of unmarshall-able values in exerr meta
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m12s
2024-04-13 22:08:45 +02:00
2e713c808d v0.0.431
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m10s
2024-04-10 15:29:59 +02:00
6602f86b43 v0.0.430
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-04-10 15:27:41 +02:00
24d9f0fdc7 v0.0.429
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m23s
2024-04-08 16:33:44 +02:00
8446b2da22 v0.0.428
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-04-08 16:32:34 +02:00
758e5a67b5 v0.0.427
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m30s
2024-04-07 15:10:21 +02:00
678ddd7124 v0.0.426 fix JsonOpt
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m57s
2024-04-01 16:03:00 +02:00
36b71dfaf3 v0.0.425 ArrAppend
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m25s
2024-03-30 14:24:53 +01:00
9491b72b8d v0.0.424 timeext.SubtractYears
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m29s
2024-03-30 03:01:55 +01:00
6c4af4006b v0.0.423 fix createPaginationPipeline - different primary and secondary sort keys broke mongo ??!?? - it actually only sorted by the secondary condition (ignoring the primary?)
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m32s
2024-03-24 15:25:52 +01:00
8bf3a337cf v0.0.422
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m29s
2024-03-23 20:29:46 +01:00
16146494dc v0.0.421
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 32s
2024-03-23 20:28:51 +01:00
b0e443ad99 v0.0.420
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 36s
2024-03-23 18:01:41 +01:00
9955eacf96 v0.0.419 JsonOpt
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 39s
2024-03-23 17:49:56 +01:00
f0347a9435 v0.0.418 fix tests?
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m1s
2024-03-20 09:42:06 +01:00
7c869c65f3 v0.0.417 add GinWrapper.ForwardRequest
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m26s
2024-03-20 08:58:59 +01:00
14f39a9162 v0.0.416
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m43s
2024-03-18 11:19:01 +01:00
dcd106c1cd v0.0.415 add 'tagkey' to gojson.Decoder
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m45s
2024-03-18 10:42:00 +01:00
b704e2a362 v0.0.414 fix rfctime.Date bson marshalling for zero value
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m28s
2024-03-16 19:42:59 +01:00
6b4bd5a6f8 v0.0.413 fix tests
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m33s
2024-03-11 21:00:30 +01:00
6df4f5f2a1 v0.0.412 fix GenerateIDSpecs accepting nil for opt
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Failing after 1m32s
2024-03-11 20:58:06 +01:00
780905ba35 v0.0.411
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m33s
2024-03-11 20:43:37 +01:00
c679797765 v0.0.410 add ginext.SuppressGinLogs
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-03-11 20:42:12 +01:00
401aad9fa4 v0.0.409
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m41s
2024-03-11 17:05:10 +01:00
645113d553 v0.0.408
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m43s
2024-03-11 16:41:47 +01:00
4a33986b6a v0.0.407 sq.Iterate
Some checks failed
Build Docker and Deploy / Run goext test-suite (push) Has been cancelled
2024-03-11 16:40:41 +01:00
c1c8c64c76 v0.0.406 bf
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m34s
2024-03-10 16:44:21 +01:00
0927fdc4d7 v0.0.405
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m36s
2024-03-10 15:28:26 +01:00
102a280dda v0.0.404
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m37s
2024-03-10 15:25:30 +01:00
f13384d794 v0.0.403 bf
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m37s
2024-03-10 12:58:59 +01:00
409d6e108d v0.0.402 add PackageName() and TypeName() to enums_codegen
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m12s
2024-03-10 12:49:31 +01:00
ed53f297bd v0.0.401 bf
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m27s
2024-03-09 15:07:03 +01:00
42424f4bc2 v0.0.400 added CommentTrimmer and DBOptions to sq
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 2m16s
2024-03-09 14:59:32 +01:00
9e5b8c5277 v0.0.399 added sq.NewAutoDBTypeConverter
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 1m25s
2024-03-09 14:16:35 +01:00
9abe28c490 v0.0.398 added As* version to sort functions
All checks were successful
Build Docker and Deploy / Run goext test-suite (push) Successful in 3m13s
2024-03-09 13:36:06 +01:00
124 changed files with 8008 additions and 1224 deletions

2
.idea/.gitignore generated vendored
View File

@@ -6,3 +6,5 @@
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
# GitHub Copilot persisted chat sessions
/copilot/chatSessions

View File

@@ -8,7 +8,7 @@ This should not have any heavy dependencies (gin, mongo, etc) and add missing ba
Potentially needs `export GOPRIVATE="gogs.mikescher.com"`
### Packages:
## Packages:
| Name | Maintainer | Description |
|-------------|------------|---------------------------------------------------------------------------------------------------------------|
@@ -20,8 +20,9 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"`
| zipext | Mike | Utility for zip/gzip/tar etc |
| reflectext | Mike | Utility for golang reflection |
| fsext | Mike | Utility for filesystem access |
| ctxext | Mike | Utility for context.Context |
| | | |
| mongoext | Mike | Utility/Helper functions for mongodb |
| mongoext | Mike | Utility/Helper functions for mongodb (kinda abandoned) |
| cursortoken | Mike | MongoDB cursortoken implementation |
| pagination | Mike | Pagination implementation |
| | | |
@@ -30,7 +31,7 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"`
| confext | Mike | Parses environment configuration into structs |
| cmdext | Mike | Runner for external commands/processes |
| | | |
| sq | Mike | Utility functions for sql based databases |
| sq | Mike | Utility functions for sql based databases (primarily sqlite) |
| tst | Mike | Utility functions for unit tests |
| | | |
| rfctime | Mike | Classes for time serialization, with different marshalling methods for mongo and json |
@@ -42,4 +43,69 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"`
| wmo | Mike | Mongo Wrapper, wraps mongodb with a better interface |
| | | |
| scn | Mike | SimpleCloudNotifier |
| | | |
| | | |
## Usage:
### exerr
- see **mongoext/builder.go** for full info
Short summary:
- A better error package with metadata, listeners, api-output and error-traces
- Initialize with `exerr.Init()`
- *Never* return `err` directly, always use exerr.Wrap(err, "...") - add metadata where applicable
- at the end either Print(), Fatal() or Output() your error (print = stdout, fatal = panic, output = json API response)
- You can add listeners with exerr.RegisterListener(), and save the full errors to a db or smth (a minimal sketch follows below)
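A minimal sketch of that flow, using only the builder methods visible elsewhere in this diff (`exerr.Wrap`, `.Str`, `.Build`); the `findUser` helper is hypothetical:
```go
package example

import (
    "gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

// findUser is a hypothetical lookup, only here for illustration.
func findUser(id string) (string, error) { return "", nil }

func LoadUserName(id string) (string, error) {
    name, err := findUser(id)
    if err != nil {
        // never return err directly - wrap it and attach metadata
        return "", exerr.Wrap(err, "failed to load user").Str("userID", id).Build()
    }
    return name, nil
}
```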
### wmo
- A typed wrapper around the official mongo-go-driver
- Use `wmo.W[...](...)` to wrap the collections and type-ify them
- The new collections have all the usual methods, but typed
- Also they have List() and Paginate() methods for paginated listings (either with a cursortoken or page/limit)
- Register additional hooks with `WithDecodeFunc`, `WithUnmarshalHook`, `WithMarshalHook`, `WithModifyingPipeline`, `WithModifyingPipelineFunc`
- List(), Paginate(), etc support filter interfaces
- Rule(s) of thumb:
- filter the results in the filter interface
- sort the results in the sort function of the filter interface
- add joins ($lookup's) in the `WithModifyingPipelineFunc`/`WithModifyingPipeline`
#### ginext
- A wrapper around gin-gonic/gin
- create the gin engine with `ginext.NewEngine`
- Add routes with `engine.Routes()...`
- `.Use(..)` adds a middleware
- `.Group(..)` adds a group
- `.Get().Handle(..)` adds a handler
- Handlers return values (in contrast to plain gin) - the values implement the `ginext.HTTPResponse` interface
- Every handler starts with something like:
```go
func (handler Handler) CommunityMetricsValues(pctx ginext.PreContext) ginext.HTTPResponse {
    type communityURI struct {
        Version     string              `uri:"version"`
        CommunityID models.CommunityID  `uri:"cid"`
    }
    type body struct {
        UserID  models.UserID  `json:"userID"`
        EventID models.EventID `json:"eventID"`
    }

    var u communityURI
    var b body
    ctx, gctx, httpErr := pctx.URI(&u).Body(&b).Start() // can have more unmarshallers, like header, form, etc
    if httpErr != nil {
        return *httpErr
    }
    defer ctx.Cancel()

    // do stuff
}
```
#### sq
- TODO (like mongoext for sqlite/sql databases)

View File

@@ -2,6 +2,8 @@
- cronext
- rfctime.DateOnly
- rfctime.HMSTimeOnly
- rfctime.NanoTimeOnly
- rfctime.NanoTimeOnly
- remove sqlx dependency from sq (unmaintained, and mostly superseded by our own stuff?)
- Move DBLogger and DBPreprocessor to sq

View File

@@ -26,6 +26,10 @@ type CSIDDef struct {
Prefix string
}
type CSIDGenOptions struct {
DebugOutput *bool
}
var rexCSIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`))
var rexCSIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@csid:type)\s+\[(?P<prefix>[A-Z0-9]{3})].*$`))
@@ -35,7 +39,9 @@ var rexCSIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumCharsetIDGen
//go:embed csid-generate.template
var templateCSIDGenerateText string
func GenerateCharsetIDSpecs(sourceDir string, destFile string) error {
func GenerateCharsetIDSpecs(sourceDir string, destFile string, opt CSIDGenOptions) error {
debugOutput := langext.Coalesce(opt.DebugOutput, false)
files, err := os.ReadDir(sourceDir)
if err != nil {
@@ -81,13 +87,18 @@ func GenerateCharsetIDSpecs(sourceDir string, destFile string) error {
pkgname := ""
for _, f := range files {
fmt.Printf("========= %s =========\n\n", f.Name())
fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name()))
if debugOutput {
fmt.Printf("========= %s =========\n\n", f.Name())
}
fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput)
if err != nil {
return err
}
fmt.Printf("\n")
if debugOutput {
fmt.Printf("\n")
}
allIDs = append(allIDs, fileIDs...)
@@ -113,7 +124,7 @@ func GenerateCharsetIDSpecs(sourceDir string, destFile string) error {
return nil
}
func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) {
func processCSIDFile(basedir string, fn string, debugOutput bool) ([]CSIDDef, string, error) {
file, err := os.Open(fn)
if err != nil {
return nil, "", err
@@ -155,7 +166,11 @@ func processCSIDFile(basedir string, fn string) ([]CSIDDef, string, error) {
Name: match.GroupByName("name").Value(),
Prefix: match.GroupByName("prefix").Value(),
}
fmt.Printf("Found ID definition { '%s' }\n", def.Name)
if debugOutput {
fmt.Printf("Found ID definition { '%s' }\n", def.Name)
}
ids = append(ids, def)
}
}

View File

@@ -34,10 +34,10 @@ func TestGenerateCSIDSpecs(t *testing.T) {
_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
tst.AssertNoErr(t, err)
err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go")
err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go", CSIDGenOptions{DebugOutput: langext.PTrue})
tst.AssertNoErr(t, err)
err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go")
err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go", CSIDGenOptions{DebugOutput: langext.PTrue})
tst.AssertNoErr(t, err)
fmt.Println()

View File

@@ -37,18 +37,23 @@ type EnumDef struct {
Values []EnumDefVal
}
type EnumGenOptions struct {
DebugOutput *bool
GoFormat *bool
}
var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`))
var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`))
var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-.]+"|[0-9]+))\s*(//(?P<comm>.*))?.*$`))
var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-.]*"|[0-9]+))\s*(//(?P<comm>.*))?.*$`))
var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))
//go:embed enum-generate.template
var templateEnumGenerateText string
func GenerateEnumSpecs(sourceDir string, destFile string) error {
func GenerateEnumSpecs(sourceDir string, destFile string, opt EnumGenOptions) error {
oldChecksum := "N/A"
if _, err := os.Stat(destFile); !os.IsNotExist(err) {
@@ -61,7 +66,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
}
}
gocode, _, changed, err := _generateEnumSpecs(sourceDir, destFile, oldChecksum, true)
gocode, _, changed, err := _generateEnumSpecs(sourceDir, destFile, oldChecksum, langext.Coalesce(opt.GoFormat, true), langext.Coalesce(opt.DebugOutput, false))
if err != nil {
return err
}
@@ -78,7 +83,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
return nil
}
func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, gofmt bool) (string, string, bool, error) {
func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, gofmt bool, debugOutput bool) (string, string, bool, error) {
files, err := os.ReadDir(sourceDir)
if err != nil {
@@ -113,13 +118,18 @@ func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, g
pkgname := ""
for _, f := range files {
fmt.Printf("========= %s =========\n\n", f.Name())
fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name()))
if debugOutput {
fmt.Printf("========= %s =========\n\n", f.Name())
}
fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput)
if err != nil {
return "", "", false, err
}
fmt.Printf("\n")
if debugOutput {
fmt.Printf("\n")
}
allEnums = append(allEnums, fileEnums...)
@@ -146,7 +156,7 @@ func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, g
return string(fdata), newChecksum, true, nil
}
func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) {
func processEnumFile(basedir string, fn string, debugOutput bool) ([]EnumDef, string, error) {
file, err := os.Open(fn)
if err != nil {
return nil, "", err
@@ -190,7 +200,10 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) {
Values: make([]EnumDefVal, 0),
}
enums = append(enums, def)
fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type)
if debugOutput {
fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type)
}
}
if match, ok := rexEnumValueDef.MatchFirst(line); ok {
@@ -230,16 +243,21 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) {
if v.EnumTypeName == typename {
enums[i].Values = append(enums[i].Values, def)
found = true
if def.Description != nil {
fmt.Printf("Found enum value [%s] for '%s' ('%s')\n", def.Value, def.VarName, *def.Description)
} else {
fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName)
if debugOutput {
if def.Description != nil {
fmt.Printf("Found enum value [%s] for '%s' ('%s')\n", def.Value, def.VarName, *def.Description)
} else {
fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName)
}
}
break
}
}
if !found {
fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName)
if debugOutput {
fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName)
}
}
}
}

View File

@@ -7,6 +7,8 @@ import "gogs.mikescher.com/BlackForestBytes/goext/enums"
const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}}
{{ $pkgname := .PkgName }}
{{range .Enums}}
{{ $hasStr := ( . | hasStr ) }}
@@ -97,6 +99,14 @@ func (e {{.EnumTypeName}}) VarName() string {
return ""
}
func (e {{.EnumTypeName}}) TypeName() string {
return "{{$typename}}"
}
func (e {{.EnumTypeName}}) PackageName() string {
return "{{$pkgname }}"
}
func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue {
{{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}}
}
@@ -134,4 +144,12 @@ func {{.EnumTypeName}}ValuesDescriptionMeta() []enums.EnumDescriptionMetaValue {
}
{{end}}
{{end}}
{{end}}
// ================================ ================= ================================
func AllPackageEnums() []enums.Enum {
return []enums.Enum{ {{range .Enums}}
{{ if gt (len .Values) 0 }} {{ $v := index .Values 0 }} {{ $v.VarName}}, {{end}} // {{ .EnumTypeName }} {{end}}
}
}

View File

@@ -37,10 +37,10 @@ func TestGenerateEnumSpecs(t *testing.T) {
_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
tst.AssertNoErr(t, err)
s1, cs1, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true)
s1, cs1, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true, true)
tst.AssertNoErr(t, err)
s2, cs2, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true)
s2, cs2, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true, true)
tst.AssertNoErr(t, err)
tst.AssertEqual(t, cs1, cs2)
@@ -76,7 +76,7 @@ func TestGenerateEnumSpecsData(t *testing.T) {
_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
tst.AssertNoErr(t, err)
s1, _, _, err := _generateEnumSpecs(tmpDir, "", "", true)
s1, _, _, err := _generateEnumSpecs(tmpDir, "", "", true, true)
tst.AssertNoErr(t, err)
fmt.Println()

View File

@@ -25,6 +25,10 @@ type IDDef struct {
Name string
}
type IDGenOptions struct {
DebugOutput *bool
}
var rexIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`))
var rexIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@id:type).*$`))
@@ -34,7 +38,9 @@ var rexIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumIDGenerator =
//go:embed id-generate.template
var templateIDGenerateText string
func GenerateIDSpecs(sourceDir string, destFile string) error {
func GenerateIDSpecs(sourceDir string, destFile string, opt IDGenOptions) error {
debugOutput := langext.Coalesce(opt.DebugOutput, false)
files, err := os.ReadDir(sourceDir)
if err != nil {
@@ -80,13 +86,18 @@ func GenerateIDSpecs(sourceDir string, destFile string) error {
pkgname := ""
for _, f := range files {
fmt.Printf("========= %s =========\n\n", f.Name())
fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name()))
if debugOutput {
fmt.Printf("========= %s =========\n\n", f.Name())
}
fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput)
if err != nil {
return err
}
fmt.Printf("\n")
if debugOutput {
fmt.Printf("\n")
}
allIDs = append(allIDs, fileIDs...)
@@ -112,7 +123,7 @@ func GenerateIDSpecs(sourceDir string, destFile string) error {
return nil
}
func processIDFile(basedir string, fn string) ([]IDDef, string, error) {
func processIDFile(basedir string, fn string, debugOutput bool) ([]IDDef, string, error) {
file, err := os.Open(fn)
if err != nil {
return nil, "", err
@@ -153,7 +164,11 @@ func processIDFile(basedir string, fn string) ([]IDDef, string, error) {
FileRelative: rfp,
Name: match.GroupByName("name").Value(),
}
fmt.Printf("Found ID definition { '%s' }\n", def.Name)
if debugOutput {
fmt.Printf("Found ID definition { '%s' }\n", def.Name)
}
ids = append(ids, def)
}
}

View File

@@ -34,10 +34,10 @@ func TestGenerateIDSpecs(t *testing.T) {
_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
tst.AssertNoErr(t, err)
err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go")
err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go", IDGenOptions{DebugOutput: langext.PTrue})
tst.AssertNoErr(t, err)
err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go")
err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go", IDGenOptions{DebugOutput: langext.PTrue})
tst.AssertNoErr(t, err)
fmt.Println()

27
ctxext/getter.go Normal file
View File

@@ -0,0 +1,27 @@
package ctxext
import "context"
func Value[T any](ctx context.Context, key any) (T, bool) {
v := ctx.Value(key)
if v == nil {
return *new(T), false
}
if tv, ok := v.(T); !ok {
return *new(T), false
} else {
return tv, true
}
}
func ValueOrDefault[T any](ctx context.Context, key any, def T) T {
v := ctx.Value(key)
if v == nil {
return def
}
if tv, ok := v.(T); !ok {
return def
} else {
return tv
}
}
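A short usage sketch for the two helpers above (module path taken from other imports in this diff; the key type is made up):
```go
package example

import (
    "context"

    "gogs.mikescher.com/BlackForestBytes/goext/ctxext"
)

type requestIDKey struct{} // hypothetical context key, only for illustration

func example(ctx context.Context) {
    ctx = context.WithValue(ctx, requestIDKey{}, "req-1234")

    // typed lookup: ok is false if the key is missing or holds a different type
    if id, ok := ctxext.Value[string](ctx, requestIDKey{}); ok {
        _ = id // "req-1234"
    }

    // same lookup, but with a fallback value instead of a bool
    id := ctxext.ValueOrDefault[string](ctx, requestIDKey{}, "unknown")
    _ = id
}
```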

View File

@@ -1,14 +1,15 @@
package cursortoken
import (
"context"
"go.mongodb.org/mongo-driver/mongo"
)
type RawFilter interface {
FilterQuery() mongo.Pipeline
FilterQuery(ctx context.Context) mongo.Pipeline
}
type Filter interface {
FilterQuery() mongo.Pipeline
Pagination() (string, SortDirection, string, SortDirection)
FilterQuery(ctx context.Context) mongo.Pipeline
Pagination(ctx context.Context) (string, SortDirection, string, SortDirection)
}
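A hypothetical implementation of the updated `Filter` interface, to show where the new `ctx` parameter ends up; the struct, its field, the pipeline and the import path are illustrative assumptions, and the concrete `SortDirection` constants are not part of this diff:
```go
package example

import (
    "context"

    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"

    "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" // assumed import path
)

// communityFilter is a made-up filter type, only for illustration.
type communityFilter struct {
    CommunityID string
}

func (f communityFilter) FilterQuery(ctx context.Context) mongo.Pipeline {
    return mongo.Pipeline{
        bson.D{{Key: "$match", Value: bson.D{{Key: "communityID", Value: f.CommunityID}}}},
    }
}

func (f communityFilter) Pagination(ctx context.Context) (string, cursortoken.SortDirection, string, cursortoken.SortDirection) {
    var dir cursortoken.SortDirection // the concrete direction constants are not shown in this diff
    return "_id", dir, "creationTime", dir
}

var _ cursortoken.Filter = communityFilter{} // compile-time interface check
```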

View File

@@ -3,8 +3,8 @@ package cursortoken
import (
"encoding/base32"
"encoding/json"
"errors"
"go.mongodb.org/mongo-driver/bson/primitive"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"strings"
"time"
)
@@ -127,7 +127,7 @@ func Decode(tok string) (CursorToken, error) {
}
if !strings.HasPrefix(tok, "tok_") {
return CursorToken{}, errors.New("could not decode token, missing prefix")
return CursorToken{}, exerr.New(exerr.TypeCursorTokenDecode, "could not decode token, missing prefix").Str("token", tok).Build()
}
body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):])
@@ -138,7 +138,7 @@ func Decode(tok string) (CursorToken, error) {
var tokenDeserialize cursorTokenSerialize
err = json.Unmarshal(body, &tokenDeserialize)
if err != nil {
return CursorToken{}, err
return CursorToken{}, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).Build()
}
token := CursorToken{Mode: CTMNormal}

254
dataext/casMutex.go Normal file
View File

@@ -0,0 +1,254 @@
package dataext
import (
"context"
"golang.org/x/sync/semaphore"
"runtime"
"sync"
"sync/atomic"
"time"
"unsafe"
)
// from https://github.com/viney-shih/go-lock/blob/2f19fd8ce335e33e0ab9dccb1ff2ce820c3da332/cas.go
// CASMutex is the struct implementing RWMutex with CAS mechanism.
type CASMutex struct {
state casState
turnstile *semaphore.Weighted
broadcastChan chan struct{}
broadcastMut sync.RWMutex
}
func NewCASMutex() *CASMutex {
return &CASMutex{
state: casStateNoLock,
turnstile: semaphore.NewWeighted(1),
broadcastChan: make(chan struct{}),
}
}
type casState int32
const (
casStateUndefined casState = iota - 2 // -2
casStateWriteLock // -1
casStateNoLock // 0
casStateReadLock // >= 1
)
func (m *CASMutex) getState(n int32) casState {
switch st := casState(n); {
case st == casStateWriteLock:
fallthrough
case st == casStateNoLock:
return st
case st >= casStateReadLock:
return casStateReadLock
default:
// actually, it should not happen.
return casStateUndefined
}
}
func (m *CASMutex) listen() <-chan struct{} {
m.broadcastMut.RLock()
defer m.broadcastMut.RUnlock()
return m.broadcastChan
}
func (m *CASMutex) broadcast() {
newCh := make(chan struct{})
m.broadcastMut.Lock()
ch := m.broadcastChan
m.broadcastChan = newCh
m.broadcastMut.Unlock()
close(ch)
}
func (m *CASMutex) tryLock(ctx context.Context) bool {
for {
broker := m.listen()
if atomic.CompareAndSwapInt32(
(*int32)(unsafe.Pointer(&m.state)),
int32(casStateNoLock),
int32(casStateWriteLock),
) {
return true
}
if ctx == nil {
return false
}
select {
case <-ctx.Done():
// timeout or cancellation
return false
case <-broker:
// waiting for signal triggered by m.broadcast() and trying again.
}
}
}
// TryLockWithContext attempts to acquire the lock, blocking until resources
// are available or ctx is done (timeout or cancellation).
func (m *CASMutex) TryLockWithContext(ctx context.Context) bool {
if err := m.turnstile.Acquire(ctx, 1); err != nil {
// Acquire failed due to timeout or cancellation
return false
}
defer m.turnstile.Release(1)
return m.tryLock(ctx)
}
// Lock acquires the lock.
// If it is currently held by others, Lock will wait until it has a chance to acquire it.
func (m *CASMutex) Lock() {
ctx := context.Background()
m.TryLockWithContext(ctx)
}
// TryLock attempts to acquire the lock without blocking.
// Return false if someone is holding it now.
func (m *CASMutex) TryLock() bool {
if !m.turnstile.TryAcquire(1) {
return false
}
defer m.turnstile.Release(1)
return m.tryLock(nil)
}
// TryLockWithTimeout attempts to acquire the lock within a period of time.
// Return false if spending time is more than duration and no chance to acquire it.
func (m *CASMutex) TryLockWithTimeout(duration time.Duration) bool {
ctx, cancel := context.WithTimeout(context.Background(), duration)
defer cancel()
return m.TryLockWithContext(ctx)
}
// Unlock releases the lock.
func (m *CASMutex) Unlock() {
if ok := atomic.CompareAndSwapInt32(
(*int32)(unsafe.Pointer(&m.state)),
int32(casStateWriteLock),
int32(casStateNoLock),
); !ok {
panic("Unlock failed")
}
m.broadcast()
}
func (m *CASMutex) rTryLock(ctx context.Context) bool {
for {
broker := m.listen()
n := atomic.LoadInt32((*int32)(unsafe.Pointer(&m.state)))
st := m.getState(n)
switch st {
case casStateNoLock, casStateReadLock:
if atomic.CompareAndSwapInt32((*int32)(unsafe.Pointer(&m.state)), n, n+1) {
return true
}
}
if ctx == nil {
return false
}
select {
case <-ctx.Done():
// timeout or cancellation
return false
default:
switch st {
// read-lock failed due to concurrence issue, try again immediately
case casStateNoLock, casStateReadLock:
runtime.Gosched() // allow other goroutines to do stuff.
continue
}
}
select {
case <-ctx.Done():
// timeout or cancellation
return false
case <-broker:
// waiting for signal triggered by m.broadcast() and trying again.
}
}
}
// RTryLockWithContext attempts to acquire the read lock, blocking until resources
// are available or ctx is done (timeout or cancellation).
func (m *CASMutex) RTryLockWithContext(ctx context.Context) bool {
if err := m.turnstile.Acquire(ctx, 1); err != nil {
// Acquire failed due to timeout or cancellation
return false
}
m.turnstile.Release(1)
return m.rTryLock(ctx)
}
// RLock acquires the read lock.
// If it is currently held by others writing, RLock will wait until it has a chance to acquire it.
func (m *CASMutex) RLock() {
ctx := context.Background()
m.RTryLockWithContext(ctx)
}
// RTryLock attempts to acquire the read lock without blocking.
// Return false if someone is writing it now.
func (m *CASMutex) RTryLock() bool {
if !m.turnstile.TryAcquire(1) {
return false
}
m.turnstile.Release(1)
return m.rTryLock(nil)
}
// RTryLockWithTimeout attempts to acquire the read lock within a period of time.
// Return false if spending time is more than duration and no chance to acquire it.
func (m *CASMutex) RTryLockWithTimeout(duration time.Duration) bool {
ctx, cancel := context.WithTimeout(context.Background(), duration)
defer cancel()
return m.RTryLockWithContext(ctx)
}
// RUnlock releases the read lock.
func (m *CASMutex) RUnlock() {
n := atomic.AddInt32((*int32)(unsafe.Pointer(&m.state)), -1)
switch m.getState(n) {
case casStateUndefined, casStateWriteLock:
panic("RUnlock failed")
case casStateNoLock:
m.broadcast()
}
}
// RLocker returns a Locker interface that implements the Lock and Unlock methods
// by calling CASMutex.RLock and CASMutex.RUnlock.
func (m *CASMutex) RLocker() sync.Locker {
return (*rlocker)(m)
}
type rlocker CASMutex
func (r *rlocker) Lock() { (*CASMutex)(r).RLock() }
func (r *rlocker) Unlock() { (*CASMutex)(r).RUnlock() }
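A usage sketch for the mutex above (import path follows from the `dataext/casMutex.go` file path and the goext module path seen in other imports):
```go
package example

import (
    "time"

    "gogs.mikescher.com/BlackForestBytes/goext/dataext"
)

func example() {
    mut := dataext.NewCASMutex()

    // exclusive lock/unlock, like sync.Mutex
    mut.Lock()
    // ... critical section ...
    mut.Unlock()

    // lock attempt that gives up after 100ms instead of blocking forever
    if mut.TryLockWithTimeout(100 * time.Millisecond) {
        // ... critical section ...
        mut.Unlock()
    }

    // shared read lock
    mut.RLock()
    // ... read-only section ...
    mut.RUnlock()
}
```
Compared to sync.RWMutex, the point of this type is the Try*/WithTimeout/WithContext variants that give up instead of blocking forever.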

67
dataext/optional.go Normal file
View File

@@ -0,0 +1,67 @@
package dataext
import (
"encoding/json"
"errors"
)
type JsonOpt[T any] struct {
isSet bool
value T
}
// MarshalJSON returns the JSON encoding of the wrapped value, or null if it is unset.
func (m JsonOpt[T]) MarshalJSON() ([]byte, error) {
if !m.isSet {
return []byte("null"), nil // actually this would be undefined - but undefined is not valid JSON
}
return json.Marshal(m.value)
}
// UnmarshalJSON unmarshals data into the wrapped value and marks it as set.
func (m *JsonOpt[T]) UnmarshalJSON(data []byte) error {
if m == nil {
return errors.New("JsonOpt: UnmarshalJSON on nil pointer")
}
m.isSet = true
return json.Unmarshal(data, &m.value)
}
func (m JsonOpt[T]) IsSet() bool {
return m.isSet
}
func (m JsonOpt[T]) IsUnset() bool {
return !m.isSet
}
func (m JsonOpt[T]) Value() (T, bool) {
if !m.isSet {
return *new(T), false
}
return m.value, true
}
func (m JsonOpt[T]) ValueOrNil() *T {
if !m.isSet {
return nil
}
return &m.value
}
func (m JsonOpt[T]) MustValue() T {
if !m.isSet {
panic("value not set")
}
return m.value
}
func (m JsonOpt[T]) IfSet(fn func(v T)) bool {
if !m.isSet {
return false
}
fn(m.value)
return true
}
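A sketch of what `JsonOpt` is for: telling a field that was present in the JSON apart from one that was omitted (import path follows from the `dataext/optional.go` file path):
```go
package example

import (
    "encoding/json"
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/dataext"
)

type patchRequest struct {
    Name dataext.JsonOpt[string] `json:"name"`
}

func example() {
    var withField, withoutField patchRequest

    _ = json.Unmarshal([]byte(`{"name":"alice"}`), &withField)
    _ = json.Unmarshal([]byte(`{}`), &withoutField)

    fmt.Println(withField.Name.IsSet())    // true  - the field was present
    fmt.Println(withoutField.Name.IsSet()) // false - the field was omitted entirely

    if v, ok := withField.Name.Value(); ok {
        fmt.Println(v) // "alice"
    }
}
```
The `isSet` flag is what lets a PATCH-style handler distinguish a field that was sent (even as null) from one that was left out of the request entirely.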

98
dataext/ringBuffer.go Normal file
View File

@@ -0,0 +1,98 @@
package dataext
import "iter"
type RingBuffer[T any] struct {
items []T // backing storage (len == capacity)
capacity int // max number of items the buffer can hold
size int // how many items are in the buffer
head int // index of the next write slot
}
func NewRingBuffer[T any](capacity int) *RingBuffer[T] {
return &RingBuffer[T]{
items: make([]T, capacity),
capacity: capacity,
size: 0,
head: 0,
}
}
func (rb *RingBuffer[T]) Push(item T) {
if rb.size < rb.capacity {
rb.size++
}
rb.items[rb.head] = item
rb.head = (rb.head + 1) % rb.capacity
}
func (rb *RingBuffer[T]) Peek() (T, bool) {
if rb.size == 0 {
return *new(T), false
}
return rb.items[(rb.head-1+rb.capacity)%rb.capacity], true
}
func (rb *RingBuffer[T]) Items() []T {
if rb.size < rb.capacity {
return rb.items[:rb.size]
}
return append(rb.items[rb.head:], rb.items[:rb.head]...)
}
func (rb *RingBuffer[T]) Size() int {
return rb.size
}
func (rb *RingBuffer[T]) Capacity() int {
return rb.capacity
}
func (rb *RingBuffer[T]) Clear() {
rb.size = 0
rb.head = 0
}
func (rb *RingBuffer[T]) IsFull() bool {
return rb.size == rb.capacity
}
func (rb *RingBuffer[T]) At(i int) T {
if i < 0 || i >= rb.size {
panic("Index out of bounds")
}
if rb.size < rb.capacity {
return rb.items[i]
}
return rb.items[(rb.head+i)%rb.capacity]
}
func (rb *RingBuffer[T]) Get(i int) (T, bool) {
if i < 0 || i >= rb.size {
return *new(T), false
}
if rb.size < rb.capacity {
return rb.items[i], true
}
return rb.items[(rb.head+i)%rb.capacity], true
}
func (rb *RingBuffer[T]) Iter() iter.Seq[T] {
return func(yield func(T) bool) {
for i := 0; i < rb.size; i++ {
if !yield(rb.At(i)) {
return
}
}
}
}
func (rb *RingBuffer[T]) Iter2() iter.Seq2[int, T] {
return func(yield func(int, T) bool) {
for i := 0; i < rb.size; i++ {
if !yield(i, rb.At(i)) {
return
}
}
}
}
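A usage sketch for the ring buffer above; `Iter`/`Iter2` are go1.23 range-over-func iterators, which matches the go1.22 -> go1.23 upgrade in v0.0.525 (import path follows from the `dataext/ringBuffer.go` file path):
```go
package example

import (
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/dataext"
)

func example() {
    rb := dataext.NewRingBuffer[int](3) // keeps only the 3 most recent values

    for i := 1; i <= 5; i++ {
        rb.Push(i)
    }

    fmt.Println(rb.Size())  // 3
    fmt.Println(rb.Items()) // [3 4 5] - oldest to newest

    if last, ok := rb.Peek(); ok {
        fmt.Println(last) // 5 - the most recently pushed value
    }

    for i, v := range rb.Iter2() { // go1.23 iterator
        fmt.Println(i, v)
    }
}
```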

View File

@@ -35,6 +35,23 @@ func (s *SyncMap[TKey, TData]) SetIfNotContains(key TKey, data TData) bool {
return true
}
func (s *SyncMap[TKey, TData]) SetIfNotContainsFunc(key TKey, data func() TData) bool {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TKey]TData)
}
if _, existsInPreState := s.data[key]; existsInPreState {
return false
}
s.data[key] = data()
return true
}
func (s *SyncMap[TKey, TData]) Get(key TKey) (TData, bool) {
s.lock.Lock()
defer s.lock.Unlock()
@@ -50,6 +67,39 @@ func (s *SyncMap[TKey, TData]) Get(key TKey) (TData, bool) {
}
}
func (s *SyncMap[TKey, TData]) GetAndSetIfNotContains(key TKey, data TData) TData {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TKey]TData)
}
if v, ok := s.data[key]; ok {
return v
} else {
s.data[key] = data
return data
}
}
func (s *SyncMap[TKey, TData]) GetAndSetIfNotContainsFunc(key TKey, data func() TData) TData {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TKey]TData)
}
if v, ok := s.data[key]; ok {
return v
} else {
dataObj := data()
s.data[key] = dataObj
return dataObj
}
}
func (s *SyncMap[TKey, TData]) Delete(key TKey) bool {
s.lock.Lock()
defer s.lock.Unlock()
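A sketch of the two new getters above; it assumes the zero value of `SyncMap` is usable (the nil checks in the shown code suggest it is) and takes the import path from the goext module path:
```go
package example

import (
    "gogs.mikescher.com/BlackForestBytes/goext/dataext"
)

func example() {
    var cache dataext.SyncMap[string, int] // zero value assumed usable, see nil checks above

    // returns the existing value, or stores and returns the given one
    v1 := cache.GetAndSetIfNotContains("answer", 42) // 42 (stored)
    v2 := cache.GetAndSetIfNotContains("answer", 99) // 42 (key already present)

    // lazy variant: the factory func only runs when the key is missing
    v3 := cache.GetAndSetIfNotContainsFunc("other", func() int { return 7 }) // 7

    _, _, _ = v1, v2, v3
}
```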

View File

@@ -19,6 +19,14 @@ func (s Single[T1]) TupleValues() []any {
return []any{s.V1}
}
func NewSingle[T1 any](v1 T1) Single[T1] {
return Single[T1]{V1: v1}
}
func NewTuple1[T1 any](v1 T1) Single[T1] {
return Single[T1]{V1: v1}
}
// ----------------------------------------------------------------------------
type Tuple[T1 any, T2 any] struct {
@@ -34,6 +42,14 @@ func (t Tuple[T1, T2]) TupleValues() []any {
return []any{t.V1, t.V2}
}
func NewTuple[T1 any, T2 any](v1 T1, v2 T2) Tuple[T1, T2] {
return Tuple[T1, T2]{V1: v1, V2: v2}
}
func NewTuple2[T1 any, T2 any](v1 T1, v2 T2) Tuple[T1, T2] {
return Tuple[T1, T2]{V1: v1, V2: v2}
}
// ----------------------------------------------------------------------------
type Triple[T1 any, T2 any, T3 any] struct {
@@ -50,6 +66,14 @@ func (t Triple[T1, T2, T3]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3}
}
func NewTriple[T1 any, T2 any, T3 any](v1 T1, v2 T2, v3 T3) Triple[T1, T2, T3] {
return Triple[T1, T2, T3]{V1: v1, V2: v2, V3: v3}
}
func NewTuple3[T1 any, T2 any, T3 any](v1 T1, v2 T2, v3 T3) Triple[T1, T2, T3] {
return Triple[T1, T2, T3]{V1: v1, V2: v2, V3: v3}
}
// ----------------------------------------------------------------------------
type Quadruple[T1 any, T2 any, T3 any, T4 any] struct {
@@ -67,6 +91,14 @@ func (t Quadruple[T1, T2, T3, T4]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4}
}
func NewQuadruple[T1 any, T2 any, T3 any, T4 any](v1 T1, v2 T2, v3 T3, v4 T4) Quadruple[T1, T2, T3, T4] {
return Quadruple[T1, T2, T3, T4]{V1: v1, V2: v2, V3: v3, V4: v4}
}
func NewTuple4[T1 any, T2 any, T3 any, T4 any](v1 T1, v2 T2, v3 T3, v4 T4) Quadruple[T1, T2, T3, T4] {
return Quadruple[T1, T2, T3, T4]{V1: v1, V2: v2, V3: v3, V4: v4}
}
// ----------------------------------------------------------------------------
type Quintuple[T1 any, T2 any, T3 any, T4 any, T5 any] struct {
@@ -86,6 +118,14 @@ func (t Quintuple[T1, T2, T3, T4, T5]) TupleValues() []any {
}
func NewQuintuple[T1 any, T2 any, T3 any, T4 any, T5 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5) Quintuple[T1, T2, T3, T4, T5] {
return Quintuple[T1, T2, T3, T4, T5]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5}
}
func NewTuple5[T1 any, T2 any, T3 any, T4 any, T5 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5) Quintuple[T1, T2, T3, T4, T5] {
return Quintuple[T1, T2, T3, T4, T5]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5}
}
// ----------------------------------------------------------------------------
type Sextuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any] struct {
@@ -106,6 +146,14 @@ func (t Sextuple[T1, T2, T3, T4, T5, T6]) TupleValues() []any {
}
func NewSextuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6) Sextuple[T1, T2, T3, T4, T5, T6] {
return Sextuple[T1, T2, T3, T4, T5, T6]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6}
}
func NewTuple6[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6) Sextuple[T1, T2, T3, T4, T5, T6] {
return Sextuple[T1, T2, T3, T4, T5, T6]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6}
}
// ----------------------------------------------------------------------------
type Septuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any] struct {
@@ -126,6 +174,14 @@ func (t Septuple[T1, T2, T3, T4, T5, T6, T7]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7}
}
func NewSeptuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7) Septuple[T1, T2, T3, T4, T5, T6, T7] {
return Septuple[T1, T2, T3, T4, T5, T6, T7]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7}
}
func NewTuple7[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7) Septuple[T1, T2, T3, T4, T5, T6, T7] {
return Septuple[T1, T2, T3, T4, T5, T6, T7]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7}
}
// ----------------------------------------------------------------------------
type Octuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any] struct {
@@ -147,6 +203,14 @@ func (t Octuple[T1, T2, T3, T4, T5, T6, T7, T8]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8}
}
func NewOctuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8) Octuple[T1, T2, T3, T4, T5, T6, T7, T8] {
return Octuple[T1, T2, T3, T4, T5, T6, T7, T8]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8}
}
func NewTuple8[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8) Octuple[T1, T2, T3, T4, T5, T6, T7, T8] {
return Octuple[T1, T2, T3, T4, T5, T6, T7, T8]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8}
}
// ----------------------------------------------------------------------------
type Nonuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any] struct {
@@ -168,3 +232,10 @@ func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleLength() int {
func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleValues() []any {
return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8, t.V9}
}
func NewNonuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8, v9 T9) Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9] {
return Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8, V9: v9}
}
func NewTuple9[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8, v9 T9) Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9] {
return Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8, V9: v9}
}
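A usage sketch for the new tuple constructors from v0.0.517; the diff does not name the package that holds these types, so the `langext` import path below is an assumption:
```go
package example

import (
    "gogs.mikescher.com/BlackForestBytes/goext/langext" // assumed package for the tuple types
)

func example() {
    p := langext.NewTuple2("id-123", 42) // Tuple[string, int], same as NewTuple(...)
    t := langext.NewTriple("a", 1, true) // Triple[string, int, bool], same as NewTuple3(...)

    _ = p.V1            // "id-123"
    _ = p.V2            // 42
    _ = t.TupleValues() // []any{"a", 1, true}
}
```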

View File

@@ -5,6 +5,8 @@ type Enum interface {
ValuesAny() []any
ValuesMeta() []EnumMetaValue
VarName() string
TypeName() string
PackageName() string
}
type StringEnum interface {

View File

@@ -30,6 +30,10 @@ import (
// If possible add metadata to the error (eg the id that was not found, ...), the methods are the same as in zerolog
// return nil, exerror.Wrap(err, "do something failed").Str("someid", id).Int("count", in.Count).Build()
//
// You can also add extra-data to an error with Extra(..)
// in contrast to metadata, extradata is always printed in the resulting error and is intended more as additional (programmatically readable) data alongside the errortype
// (metadata is more internal debug info/help)
//
// You can change the errortype with `.User()` and `.System()` (User-errors are 400 and System-errors 500)
// You can also manually set the statuscode with `.WithStatuscode(http.NotFound)`
// You can set the type with `WithType(..)`
@@ -55,23 +59,12 @@ import (
// => Wrap/New + Fatal
//
var stackSkipLogger zerolog.Logger
func init() {
cw := zerolog.ConsoleWriter{
Out: os.Stdout,
TimeFormat: "2006-01-02 15:04:05 Z07:00",
}
multi := zerolog.MultiLevelWriter(cw)
stackSkipLogger = zerolog.New(multi).With().Timestamp().CallerWithSkipFrameCount(4).Logger()
}
type Builder struct {
wrappedErr error
errorData *ExErr
containsGinData bool
noLog bool
wrappedErr error
errorData *ExErr
containsGinData bool
containsContextData bool
noLog bool
}
func Get(err error) *Builder {
@@ -87,12 +80,14 @@ func Wrap(err error, msg string) *Builder {
return &Builder{errorData: newExErr(CatSystem, TypeInternal, msg)} // prevent NPE if we call Wrap with err==nil
}
v := FromError(err)
if !pkgconfig.RecursiveErrors {
v := FromError(err)
v.Message = msg
return &Builder{wrappedErr: err, errorData: v}
} else {
return &Builder{wrappedErr: err, errorData: wrapExErr(v, msg, CatWrap, 1)}
}
return &Builder{wrappedErr: err, errorData: wrapExErr(FromError(err), msg, CatWrap, 1)}
}
// ----------------------------------------------------------------------------
@@ -309,27 +304,27 @@ func (b *Builder) Errs(key string, val []error) *Builder {
func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) *Builder {
if v := ctx.Value("start_timestamp"); v != nil {
if t, ok := v.(time.Time); ok {
b.Time("ctx.startTimestamp", t)
b.Time("ctx.endTimestamp", time.Now())
b.Time("ctx_startTimestamp", t)
b.Time("ctx_endTimestamp", time.Now())
}
}
b.Str("gin.method", req.Method)
b.Str("gin.path", g.FullPath())
b.Strs("gin.header", extractHeader(g.Request.Header))
b.Str("gin_method", req.Method)
b.Str("gin_path", g.FullPath())
b.Strs("gin_header", extractHeader(g.Request.Header))
if req.URL != nil {
b.Str("gin.url", req.URL.String())
b.Str("gin_url", req.URL.String())
}
if ctxVal := g.GetString("apiversion"); ctxVal != "" {
b.Str("gin.context.apiversion", ctxVal)
b.Str("gin_context_apiversion", ctxVal)
}
if ctxVal := g.GetString("uid"); ctxVal != "" {
b.Str("gin.context.uid", ctxVal)
b.Str("gin_context_uid", ctxVal)
}
if ctxVal := g.GetString("fcmId"); ctxVal != "" {
b.Str("gin.context.fcmid", ctxVal)
b.Str("gin_context_fcmid", ctxVal)
}
if ctxVal := g.GetString("reqid"); ctxVal != "" {
b.Str("gin.context.reqid", ctxVal)
b.Str("gin_context_reqid", ctxVal)
}
if req.Method != "GET" && req.Body != nil {
@@ -340,12 +335,12 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request)
var prettyJSON bytes.Buffer
err = json.Indent(&prettyJSON, bin, "", " ")
if err == nil {
b.Str("gin.body", string(prettyJSON.Bytes()))
b.Str("gin_body", string(prettyJSON.Bytes()))
} else {
b.Bytes("gin.body", bin)
b.Bytes("gin_body", bin)
}
} else {
b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
b.Str("gin_body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
}
}
}
@@ -355,9 +350,9 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request)
if brc, ok := req.Body.(dataext.BufferedReadCloser); ok {
if bin, err := brc.BufferedAll(); err == nil {
if len(bin) < 16*1024 {
b.Bytes("gin.body", bin)
b.Bytes("gin_body", bin)
} else {
b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
b.Str("gin_body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type")))
}
}
}
@@ -365,31 +360,18 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request)
}
pkgconfig.ExtendGinMeta(ctx, b, g, req)
b.containsGinData = true
return b
}
func formatHeader(header map[string][]string) string {
ml := 1
for k, _ := range header {
if len(k) > ml {
ml = len(k)
}
}
r := ""
for k, v := range header {
if r != "" {
r += "\n"
}
for _, hval := range v {
value := hval
value = strings.ReplaceAll(value, "\n", "\\n")
value = strings.ReplaceAll(value, "\r", "\\r")
value = strings.ReplaceAll(value, "\t", "\\t")
r += langext.StrPadRight(k, " ", ml) + " := " + value
}
}
return r
func (b *Builder) CtxData(method Method, ctx context.Context) *Builder {
pkgconfig.ExtendContextMeta(b, method, ctx)
b.containsContextData = true
return b
}
func extractHeader(header map[string][]string) []string {
@@ -408,21 +390,35 @@ func extractHeader(header map[string][]string) []string {
// ----------------------------------------------------------------------------
// Extra adds additional data to the error.
// This is not like the other metadata (Id(), Str(), etc.):
// extra-data is public and will be printed/output.
func (b *Builder) Extra(key string, val any) *Builder {
b.errorData.Extra[key] = val
return b
}
// ----------------------------------------------------------------------------
// Build creates a new error, ready to pass up the stack
// If the error is not SevWarn or SevInfo it is also logged (in short form, without stacktrace) to stdout
// Can be globally configured with ZeroLogErrTraces and ZeroLogAllTraces
// Can be locally suppressed with Builder.NoLog()
func (b *Builder) Build() error {
func (b *Builder) Build(ctxs ...context.Context) error {
warnOnPkgConfigNotInitialized()
for _, dctx := range ctxs {
b.CtxData(MethodBuild, dctx)
}
if pkgconfig.DisableErrorWrapping && b.wrappedErr != nil {
return b.wrappedErr
}
if pkgconfig.ZeroLogErrTraces && !b.noLog && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) {
b.errorData.ShortLog(stackSkipLogger.Error())
b.errorData.ShortLog(pkgconfig.ZeroLogger.Error())
} else if pkgconfig.ZeroLogAllTraces && !b.noLog {
b.errorData.ShortLog(stackSkipLogger.Error())
b.errorData.ShortLog(pkgconfig.ZeroLogger.Error())
}
b.errorData.CallListener(MethodBuild)
@@ -439,12 +435,14 @@ func (b *Builder) Output(ctx context.Context, g *gin.Context) {
b.GinReq(ctx, g, g.Request)
}
b.CtxData(MethodOutput, ctx)
b.errorData.Output(g)
if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal {
b.errorData.Log(stackSkipLogger.Error())
} else if b.errorData.Severity == SevWarn {
b.errorData.Log(stackSkipLogger.Warn())
if (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) && (pkgconfig.ZeroLogErrGinOutput || pkgconfig.ZeroLogAllGinOutput) {
b.errorData.Log(pkgconfig.ZeroLogger.Error())
} else if (b.errorData.Severity == SevWarn) && (pkgconfig.ZeroLogAllGinOutput) {
b.errorData.Log(pkgconfig.ZeroLogger.Warn())
}
b.errorData.CallListener(MethodOutput)
@@ -452,11 +450,21 @@ func (b *Builder) Output(ctx context.Context, g *gin.Context) {
// Print prints the error
// If the error is SevErr we also send it to the error-service
func (b *Builder) Print() {
func (b *Builder) Print(ctxs ...context.Context) {
warnOnPkgConfigNotInitialized()
for _, dctx := range ctxs {
b.CtxData(MethodPrint, dctx)
}
if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal {
b.errorData.Log(stackSkipLogger.Error())
b.errorData.Log(pkgconfig.ZeroLogger.Error())
} else if b.errorData.Severity == SevWarn {
b.errorData.ShortLog(stackSkipLogger.Warn())
b.errorData.ShortLog(pkgconfig.ZeroLogger.Warn())
} else if b.errorData.Severity == SevInfo {
b.errorData.ShortLog(pkgconfig.ZeroLogger.Info())
} else {
b.errorData.ShortLog(pkgconfig.ZeroLogger.Debug())
}
b.errorData.CallListener(MethodPrint)
@@ -468,9 +476,15 @@ func (b *Builder) Format(level LogPrintLevel) string {
// Fatal prints the error and terminates the program
// If the error is SevErr we also send it to the error-service
func (b *Builder) Fatal() {
func (b *Builder) Fatal(ctxs ...context.Context) {
b.errorData.Severity = SevFatal
b.errorData.Log(stackSkipLogger.WithLevel(zerolog.FatalLevel))
for _, dctx := range ctxs {
b.CtxData(MethodFatal, dctx)
}
b.errorData.Log(pkgconfig.ZeroLogger.WithLevel(zerolog.FatalLevel))
b.errorData.CallListener(MethodFatal)

View File

@@ -12,6 +12,8 @@ import (
var reflectTypeStr = reflect.TypeOf("")
func FromError(err error) *ExErr {
//goland:noinspection GoTypeAssertionOnErrors
if verr, ok := err.(*ExErr); ok {
// A simple ExErr
return verr
@@ -31,6 +33,7 @@ func FromError(err error) *ExErr {
Caller: "",
OriginalError: nil,
Meta: getForeignMeta(err),
Extra: make(map[string]any),
}
}
@@ -48,6 +51,7 @@ func newExErr(cat ErrorCategory, errtype ErrorType, msg string) *ExErr {
Caller: callername(2),
OriginalError: nil,
Meta: make(map[string]MetaValue),
Extra: make(map[string]any),
}
}
@@ -56,7 +60,7 @@ func wrapExErr(e *ExErr, msg string, cat ErrorCategory, stacktraceskip int) *ExE
UniqueID: newID(),
Category: cat,
Type: TypeWrap,
Severity: SevErr,
Severity: e.Severity,
Timestamp: time.Now(),
StatusCode: e.StatusCode,
Message: msg,
@@ -65,6 +69,7 @@ func wrapExErr(e *ExErr, msg string, cat ErrorCategory, stacktraceskip int) *ExE
Caller: callername(1 + stacktraceskip),
OriginalError: e,
Meta: make(map[string]MetaValue),
Extra: langext.CopyMap(langext.ForceMap(e.Extra)),
}
}
@@ -181,7 +186,7 @@ func getReflectedMetaValues(value interface{}, remainingDepth int) map[string]Me
jsonval, err := json.Marshal(value)
if err != nil {
panic(err) // gets recovered later up
return map[string]MetaValue{"": {DataType: MDTString, Value: fmt.Sprintf("Failed to Marshal %T:\n%+v", value, value)}}
}
return map[string]MetaValue{"": {DataType: MDTString, Value: string(jsonval)}}

View File

@@ -1,91 +1,14 @@
package exerr
import (
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
type Method string
const (
MethodOutput Method = "OUTPUT"
MethodPrint Method = "PRINT"
MethodBuild Method = "BUILD"
MethodFatal Method = "FATAL"
)
type ErrorCategory struct{ Category string }
var (
CatWrap = ErrorCategory{"Wrap"} // The error is simply wrapping another error (e.g. when a grpc call returns an error)
CatSystem = ErrorCategory{"System"} // An internal system error (e.g. connection to db failed)
CatUser = ErrorCategory{"User"} // The user (the API caller) did something wrong (e.g. he has no permissions to do this)
CatForeign = ErrorCategory{"Foreign"} // A foreign error that some component threw (e.g. an unknown mongodb error), happens if we call Wrap(..) on a non-bmerror value
)
//goland:noinspection GoUnusedGlobalVariable
var AllCategories = []ErrorCategory{CatWrap, CatSystem, CatUser, CatForeign}
type ErrorSeverity struct{ Severity string }
var (
SevTrace = ErrorSeverity{"Trace"}
SevDebug = ErrorSeverity{"Debug"}
SevInfo = ErrorSeverity{"Info"}
SevWarn = ErrorSeverity{"Warn"}
SevErr = ErrorSeverity{"Err"}
SevFatal = ErrorSeverity{"Fatal"}
)
//goland:noinspection GoUnusedGlobalVariable
var AllSeverities = []ErrorSeverity{SevTrace, SevDebug, SevInfo, SevWarn, SevErr, SevFatal}
type ErrorType struct {
Key string
DefaultStatusCode *int
}
//goland:noinspection GoUnusedGlobalVariable
var (
TypeInternal = NewType("INTERNAL_ERROR", langext.Ptr(500))
TypePanic = NewType("PANIC", langext.Ptr(500))
TypeNotImplemented = NewType("NOT_IMPLEMENTED", langext.Ptr(500))
TypeMongoQuery = NewType("MONGO_QUERY", langext.Ptr(500))
TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500))
TypeMongoFilter = NewType("MONGO_FILTER", langext.Ptr(500))
TypeMongoReflection = NewType("MONGO_REFLECTION", langext.Ptr(500))
TypeMongoInvalidOpt = NewType("MONGO_INVALIDOPT", langext.Ptr(500))
TypeSQLQuery = NewType("SQL_QUERY", langext.Ptr(500))
TypeSQLBuild = NewType("SQL_BUILD", langext.Ptr(500))
TypeSQLDecode = NewType("SQL_DECODE", langext.Ptr(500))
TypeWrap = NewType("Wrap", nil)
TypeBindFailURI = NewType("BINDFAIL_URI", langext.Ptr(400))
TypeBindFailQuery = NewType("BINDFAIL_QUERY", langext.Ptr(400))
TypeBindFailJSON = NewType("BINDFAIL_JSON", langext.Ptr(400))
TypeBindFailFormData = NewType("BINDFAIL_FORMDATA", langext.Ptr(400))
TypeBindFailHeader = NewType("BINDFAIL_HEADER", langext.Ptr(400))
TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400))
TypeInvalidCSID = NewType("INVALID_CSID", langext.Ptr(400))
TypeGoogleStatuscode = NewType("GOOGLE_STATUSCODE", langext.Ptr(400))
TypeGoogleResponse = NewType("GOOGLE_RESPONSE", langext.Ptr(400))
TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401))
TypeAuthFailed = NewType("AUTH_FAILED", langext.Ptr(401))
// other values come from the downstream application that uses goext
)
var registeredTypes = dataext.SyncMap[string, ErrorType]{}
func NewType(key string, defStatusCode *int) ErrorType {
et := ErrorType{key, defStatusCode}
registeredTypes.Set(key, et)
return et
}
func ListRegisteredTypes() []ErrorType {
return registeredTypes.GetAllValues()
}
type LogPrintLevel string
const (

89
exerr/dataCategory.go Normal file
View File

@@ -0,0 +1,89 @@
package exerr
import (
"encoding/json"
"errors"
"fmt"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"reflect"
)
type ErrorCategory struct{ Category string }
var (
CatWrap = ErrorCategory{"Wrap"} // The error is simply wrapping another error (e.g. when a grpc call returns an error)
CatSystem = ErrorCategory{"System"} // An internal system error (e.g. connection to db failed)
CatUser = ErrorCategory{"User"} // The user (the API caller) did something wrong (e.g. he has no permissions to do this)
CatForeign = ErrorCategory{"Foreign"} // A foreign error that some component threw (e.g. an unknown mongodb error), happens if we call Wrap(..) on a non-bmerror value
)
func (e *ErrorCategory) UnmarshalJSON(bytes []byte) error {
return json.Unmarshal(bytes, &e.Category)
}
func (e ErrorCategory) MarshalJSON() ([]byte, error) {
return json.Marshal(e.Category)
}
func (e *ErrorCategory) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bson.TypeNull {
// we can't set nil in UnmarshalBSONValue (so we use default(struct))
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values
// https://stackoverflow.com/questions/75167597
// https://jira.mongodb.org/browse/GODRIVER-2252
*e = ErrorCategory{}
return nil
}
if bt != bson.TypeString {
return errors.New(fmt.Sprintf("cannot unmarshal %v into String", bt))
}
var tt string
err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt)
if err != nil {
return err
}
*e = ErrorCategory{tt}
return nil
}
func (e ErrorCategory) MarshalBSONValue() (bsontype.Type, []byte, error) {
return bson.MarshalValue(e.Category)
}
func (e ErrorCategory) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
if val.Kind() == reflect.Ptr && val.IsNil() {
if !val.CanSet() {
return errors.New("ValueUnmarshalerDecodeValue")
}
val.Set(reflect.New(val.Type().Elem()))
}
tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr && len(src) == 0 {
val.Set(reflect.Zero(val.Type()))
return nil
}
err = e.UnmarshalBSONValue(tp, src)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr {
val.Set(reflect.ValueOf(&e))
} else {
val.Set(reflect.ValueOf(e))
}
return nil
}
//goland:noinspection GoUnusedGlobalVariable
var AllCategories = []ErrorCategory{CatWrap, CatSystem, CatUser, CatForeign}

91
exerr/dataSeverity.go Normal file
View File

@@ -0,0 +1,91 @@
package exerr
import (
"encoding/json"
"errors"
"fmt"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"reflect"
)
type ErrorSeverity struct{ Severity string }
var (
SevTrace = ErrorSeverity{"Trace"}
SevDebug = ErrorSeverity{"Debug"}
SevInfo = ErrorSeverity{"Info"}
SevWarn = ErrorSeverity{"Warn"}
SevErr = ErrorSeverity{"Err"}
SevFatal = ErrorSeverity{"Fatal"}
)
func (e *ErrorSeverity) UnmarshalJSON(bytes []byte) error {
return json.Unmarshal(bytes, &e.Severity)
}
func (e ErrorSeverity) MarshalJSON() ([]byte, error) {
return json.Marshal(e.Severity)
}
func (e *ErrorSeverity) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bson.TypeNull {
// we can't set nil in UnmarshalBSONValue (so we use default(struct))
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values
// https://stackoverflow.com/questions/75167597
// https://jira.mongodb.org/browse/GODRIVER-2252
*e = ErrorSeverity{}
return nil
}
if bt != bson.TypeString {
return errors.New(fmt.Sprintf("cannot unmarshal %v into String", bt))
}
var tt string
err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt)
if err != nil {
return err
}
*e = ErrorSeverity{tt}
return nil
}
func (e ErrorSeverity) MarshalBSONValue() (bsontype.Type, []byte, error) {
return bson.MarshalValue(e.Severity)
}
func (e ErrorSeverity) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
if val.Kind() == reflect.Ptr && val.IsNil() {
if !val.CanSet() {
return errors.New("ValueUnmarshalerDecodeValue")
}
val.Set(reflect.New(val.Type().Elem()))
}
tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr && len(src) == 0 {
val.Set(reflect.Zero(val.Type()))
return nil
}
err = e.UnmarshalBSONValue(tp, src)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr {
val.Set(reflect.ValueOf(&e))
} else {
val.Set(reflect.ValueOf(e))
}
return nil
}
//goland:noinspection GoUnusedGlobalVariable
var AllSeverities = []ErrorSeverity{SevTrace, SevDebug, SevInfo, SevWarn, SevErr, SevFatal}

156
exerr/dataType.go Normal file
View File

@@ -0,0 +1,156 @@
package exerr
import (
"encoding/json"
"errors"
"fmt"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"gogs.mikescher.com/BlackForestBytes/goext/dataext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
)
type ErrorType struct {
Key string
DefaultStatusCode *int
}
//goland:noinspection GoUnusedGlobalVariable
var (
TypeInternal = NewType("INTERNAL_ERROR", langext.Ptr(500))
TypePanic = NewType("PANIC", langext.Ptr(500))
TypeNotImplemented = NewType("NOT_IMPLEMENTED", langext.Ptr(500))
TypeAssert = NewType("ASSERT", langext.Ptr(500))
TypeMongoQuery = NewType("MONGO_QUERY", langext.Ptr(500))
TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500))
TypeMongoFilter = NewType("MONGO_FILTER", langext.Ptr(500))
TypeMongoReflection = NewType("MONGO_REFLECTION", langext.Ptr(500))
TypeMongoInvalidOpt = NewType("MONGO_INVALIDOPT", langext.Ptr(500))
TypeSQLQuery = NewType("SQL_QUERY", langext.Ptr(500))
TypeSQLBuild = NewType("SQL_BUILD", langext.Ptr(500))
TypeSQLDecode = NewType("SQL_DECODE", langext.Ptr(500))
TypeWrap = NewType("Wrap", nil)
TypeBindFailURI = NewType("BINDFAIL_URI", langext.Ptr(400))
TypeBindFailQuery = NewType("BINDFAIL_QUERY", langext.Ptr(400))
TypeBindFailJSON = NewType("BINDFAIL_JSON", langext.Ptr(400))
TypeBindFailFormData = NewType("BINDFAIL_FORMDATA", langext.Ptr(400))
TypeBindFailHeader = NewType("BINDFAIL_HEADER", langext.Ptr(400))
TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400))
TypeInvalidCSID = NewType("INVALID_CSID", langext.Ptr(400))
TypeGoogleStatuscode = NewType("GOOGLE_STATUSCODE", langext.Ptr(400))
TypeGoogleResponse = NewType("GOOGLE_RESPONSE", langext.Ptr(400))
TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401))
TypeAuthFailed = NewType("AUTH_FAILED", langext.Ptr(401))
TypeInvalidImage = NewType("IMAGEEXT_INVALID_IMAGE", langext.Ptr(400))
TypeInvalidMimeType = NewType("IMAGEEXT_INVALID_MIMETYPE", langext.Ptr(400))
// other values come from the downstream application that uses goext
)
func (e *ErrorType) UnmarshalJSON(bytes []byte) error {
var k string
err := json.Unmarshal(bytes, &k)
if err != nil {
return err
}
if d, ok := registeredTypes.Get(k); ok {
*e = d
return nil
} else {
*e = ErrorType{k, nil}
return nil
}
}
func (e ErrorType) MarshalJSON() ([]byte, error) {
return json.Marshal(e.Key)
}
func (e *ErrorType) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bson.TypeNull {
// we can't set nil in UnmarshalBSONValue (so we use default(struct))
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values
// https://stackoverflow.com/questions/75167597
// https://jira.mongodb.org/browse/GODRIVER-2252
*e = ErrorType{}
return nil
}
if bt != bson.TypeString {
return errors.New(fmt.Sprintf("cannot unmarshal %v into String", bt))
}
var tt string
err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt)
if err != nil {
return err
}
if d, ok := registeredTypes.Get(tt); ok {
*e = d
return nil
} else {
*e = ErrorType{tt, nil}
return nil
}
}
func (e ErrorType) MarshalBSONValue() (bsontype.Type, []byte, error) {
return bson.MarshalValue(e.Key)
}
func (e ErrorType) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
if val.Kind() == reflect.Ptr && val.IsNil() {
if !val.CanSet() {
return errors.New("ValueUnmarshalerDecodeValue")
}
val.Set(reflect.New(val.Type().Elem()))
}
tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr && len(src) == 0 {
val.Set(reflect.Zero(val.Type()))
return nil
}
err = e.UnmarshalBSONValue(tp, src)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr {
val.Set(reflect.ValueOf(&e))
} else {
val.Set(reflect.ValueOf(e))
}
return nil
}
var registeredTypes = dataext.SyncMap[string, ErrorType]{}
func NewType(key string, defStatusCode *int) ErrorType {
et := ErrorType{key, defStatusCode}
registeredTypes.Set(key, et)
return et
}
func ListRegisteredTypes() []ErrorType {
return registeredTypes.GetAllValues()
}
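Downstream applications register their own error types through NewType; the registry lets the JSON/BSON unmarshalers above resolve a stored key back to the registered value. The key and status code below are made up:

    // somewhere in the application that uses goext
    var TypeQuotaExceeded = exerr.NewType("QUOTA_EXCEEDED", langext.Ptr(429))

    // ListRegisteredTypes() now also contains TypeQuotaExceeded, and
    // unmarshalling the string "QUOTA_EXCEEDED" yields exactly this value.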

153
exerr/data_test.go Normal file
View File

@@ -0,0 +1,153 @@
package exerr
import (
"context"
"encoding/json"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/primitive"
"go.mongodb.org/mongo-driver/mongo"
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"testing"
"time"
)
func TestJSONMarshalErrorCategory(t *testing.T) {
c1 := CatSystem
jsonbin := tst.Must(json.Marshal(c1))(t)
var c2 ErrorCategory
tst.AssertNoErr(t, json.Unmarshal(jsonbin, &c2))
tst.AssertEqual(t, c1, c2)
tst.AssertEqual(t, string(jsonbin), "\"System\"")
}
func TestJSONMarshalErrorSeverity(t *testing.T) {
c1 := SevErr
jsonbin := tst.Must(json.Marshal(c1))(t)
var c2 ErrorSeverity
tst.AssertNoErr(t, json.Unmarshal(jsonbin, &c2))
tst.AssertEqual(t, c1, c2)
tst.AssertEqual(t, string(jsonbin), "\"Err\"")
}
func TestJSONMarshalErrorType(t *testing.T) {
c1 := TypeNotImplemented
jsonbin := tst.Must(json.Marshal(c1))(t)
var c2 ErrorType
tst.AssertNoErr(t, json.Unmarshal(jsonbin, &c2))
tst.AssertEqual(t, c1, c2)
tst.AssertEqual(t, string(jsonbin), "\"NOT_IMPLEMENTED\"")
}
func TestBSONMarshalErrorCategory(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), 350*time.Millisecond)
defer cancel()
client, err := mongo.Connect(ctx)
if err != nil {
t.Skip("Skip test - no local mongo found")
return
}
err = client.Ping(ctx, nil)
if err != nil {
t.Skip("Skip test - no local mongo found")
return
}
primimd := primitive.NewObjectID()
_, err = client.Database("_test").Collection("goext-cicd").InsertOne(ctx, bson.M{"_id": primimd, "val": CatSystem})
tst.AssertNoErr(t, err)
cursor := client.Database("_test").Collection("goext-cicd").FindOne(ctx, bson.M{"_id": primimd, "val": bson.M{"$type": "string"}})
var c1 struct {
ID primitive.ObjectID `bson:"_id"`
Val ErrorCategory `bson:"val"`
}
err = cursor.Decode(&c1)
tst.AssertNoErr(t, err)
tst.AssertEqual(t, c1.Val, CatSystem)
}
func TestBSONMarshalErrorSeverity(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), 350*time.Millisecond)
defer cancel()
client, err := mongo.Connect(ctx)
if err != nil {
t.Skip("Skip test - no local mongo found")
return
}
err = client.Ping(ctx, nil)
if err != nil {
t.Skip("Skip test - no local mongo found")
return
}
primimd := primitive.NewObjectID()
_, err = client.Database("_test").Collection("goext-cicd").InsertOne(ctx, bson.M{"_id": primimd, "val": SevErr})
tst.AssertNoErr(t, err)
cursor := client.Database("_test").Collection("goext-cicd").FindOne(ctx, bson.M{"_id": primimd, "val": bson.M{"$type": "string"}})
var c1 struct {
ID primitive.ObjectID `bson:"_id"`
Val ErrorSeverity `bson:"val"`
}
err = cursor.Decode(&c1)
tst.AssertNoErr(t, err)
tst.AssertEqual(t, c1.Val, SevErr)
}
func TestBSONMarshalErrorType(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), 350*time.Millisecond)
defer cancel()
client, err := mongo.Connect(ctx)
if err != nil {
t.Skip("Skip test - no local mongo found")
return
}
err = client.Ping(ctx, nil)
if err != nil {
t.Skip("Skip test - no local mongo found")
return
}
primimd := primitive.NewObjectID()
_, err = client.Database("_test").Collection("goext-cicd").InsertOne(ctx, bson.M{"_id": primimd, "val": TypeNotImplemented})
tst.AssertNoErr(t, err)
cursor := client.Database("_test").Collection("goext-cicd").FindOne(ctx, bson.M{"_id": primimd, "val": bson.M{"$type": "string"}})
var c1 struct {
ID primitive.ObjectID `bson:"_id"`
Val ErrorType `bson:"val"`
}
err = cursor.Decode(&c1)
tst.AssertNoErr(t, err)
tst.AssertEqual(t, c1.Val, TypeNotImplemented)
}

View File

@@ -1,19 +1,29 @@
package exerr
import (
"context"
"fmt"
"github.com/gin-gonic/gin"
"github.com/rs/zerolog"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"net/http"
"os"
)
type ErrorPackageConfig struct {
ZeroLogErrTraces bool // autom print zerolog logs on .Build() (for SevErr and SevFatal)
ZeroLogAllTraces bool // autom print zerolog logs on .Build() (for all Severities)
RecursiveErrors bool // errors contains their Origin-Error
ExtendedGinOutput bool // Log extended data (trace, meta, ...) to gin in err.Output()
IncludeMetaInGinOutput bool // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output()
ExtendGinOutput func(err *ExErr, json map[string]any) // (Optionally) extend the gin output with more fields
ExtendGinDataOutput func(err *ExErr, depth int, json map[string]any) // (Optionally) extend the gin `__data` output with more fields
DisableErrorWrapping bool // Disables the exerr.Wrap()...Build() function - will always return the original error
ZeroLogErrTraces bool // autom print zerolog logs on .Build() (for SevErr and SevFatal)
ZeroLogAllTraces bool // autom print zerolog logs on .Build() (for all Severities)
RecursiveErrors bool // errors contain their Origin-Error
ExtendedGinOutput bool // Log extended data (trace, meta, ...) to gin in err.Output()
IncludeMetaInGinOutput bool // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output()
ExtendGinOutput func(err *ExErr, json map[string]any) // (Optionally) extend the gin output with more fields
ExtendGinDataOutput func(err *ExErr, depth int, json map[string]any) // (Optionally) extend the gin `__data` output with more fields
DisableErrorWrapping bool // Disables the exerr.Wrap()...Build() function - will always return the original error
ZeroLogErrGinOutput bool // autom print zerolog logs on ginext.Error() / .Output(gin) (for SevErr and SevFatal)
ZeroLogAllGinOutput bool // autom print zerolog logs on ginext.Error() / .Output(gin) (for all Severities)
ExtendGinMeta func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) // (Optionally) extend the final error meta values with additional data from the gin context (a few are automatically added, here more can be included)
ExtendContextMeta func(b *Builder, method Method, dctx context.Context) // (Optionally) extend the final error meta values with additional data from the context (a few are automatically added, here more can be included)
ZeroLogger zerolog.Logger // The logger used to print exerr log messages
}
type ErrorPackageConfigInit struct {
@@ -25,6 +35,11 @@ type ErrorPackageConfigInit struct {
ExtendGinOutput func(err *ExErr, json map[string]any)
ExtendGinDataOutput func(err *ExErr, depth int, json map[string]any)
DisableErrorWrapping *bool
ZeroLogErrGinOutput *bool
ZeroLogAllGinOutput *bool
ExtendGinMeta func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request)
ExtendContextMeta func(b *Builder, method Method, dctx context.Context)
ZeroLogger *zerolog.Logger
}
var initialized = false
@@ -38,6 +53,10 @@ var pkgconfig = ErrorPackageConfig{
ExtendGinOutput: func(err *ExErr, json map[string]any) {},
ExtendGinDataOutput: func(err *ExErr, depth int, json map[string]any) {},
DisableErrorWrapping: false,
ZeroLogErrGinOutput: true,
ZeroLogAllGinOutput: false,
ExtendGinMeta: func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) {},
ExtendContextMeta: func(b *Builder, method Method, dctx context.Context) {},
}
// Init initializes the exerr package
@@ -50,6 +69,8 @@ func Init(cfg ErrorPackageConfigInit) {
ego := func(err *ExErr, json map[string]any) {}
egdo := func(err *ExErr, depth int, json map[string]any) {}
egm := func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) {}
egcm := func(b *Builder, method Method, dctx context.Context) {}
if cfg.ExtendGinOutput != nil {
ego = cfg.ExtendGinOutput
@@ -57,6 +78,19 @@ func Init(cfg ErrorPackageConfigInit) {
if cfg.ExtendGinDataOutput != nil {
egdo = cfg.ExtendGinDataOutput
}
if cfg.ExtendGinMeta != nil {
egm = cfg.ExtendGinMeta
}
if cfg.ExtendContextMeta != nil {
egcm = cfg.ExtendContextMeta
}
var logger zerolog.Logger
if cfg.ZeroLogger != nil {
logger = *cfg.ZeroLogger
} else {
logger = newDefaultLogger()
}
pkgconfig = ErrorPackageConfig{
ZeroLogErrTraces: langext.Coalesce(cfg.ZeroLogErrTraces, pkgconfig.ZeroLogErrTraces),
@@ -67,11 +101,27 @@ func Init(cfg ErrorPackageConfigInit) {
ExtendGinOutput: ego,
ExtendGinDataOutput: egdo,
DisableErrorWrapping: langext.Coalesce(cfg.DisableErrorWrapping, pkgconfig.DisableErrorWrapping),
ZeroLogAllGinOutput: langext.Coalesce(cfg.ZeroLogAllGinOutput, pkgconfig.ZeroLogAllGinOutput),
ZeroLogErrGinOutput: langext.Coalesce(cfg.ZeroLogErrGinOutput, pkgconfig.ZeroLogErrGinOutput),
ExtendGinMeta: egm,
ExtendContextMeta: egcm,
ZeroLogger: logger,
}
initialized = true
}
func newDefaultLogger() zerolog.Logger {
cw := zerolog.ConsoleWriter{
Out: os.Stdout,
TimeFormat: "2006-01-02 15:04:05 Z07:00",
}
multi := zerolog.MultiLevelWriter(cw)
return zerolog.New(multi).With().Timestamp().CallerWithSkipFrameCount(4).Logger()
}
func Initialized() bool {
return initialized
}
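A minimal caller-side Init sketch; every field is optional and unset fields keep the package defaults above. The custom logger is only an example, langext.Ptr supplies the required pointers:

    logger := zerolog.New(os.Stdout).With().Timestamp().Logger()

    exerr.Init(exerr.ErrorPackageConfigInit{
        ZeroLogErrTraces:    langext.Ptr(true),
        ZeroLogAllTraces:    langext.Ptr(false),
        ZeroLogErrGinOutput: langext.Ptr(true),
        ZeroLogger:          &logger,
    })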

View File

@@ -1,6 +1,7 @@
package exerr
import (
"fmt"
"github.com/rs/xid"
"github.com/rs/zerolog"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
@@ -26,7 +27,8 @@ type ExErr struct {
OriginalError *ExErr `json:"originalError"`
Meta MetaMap `json:"meta"`
Extra map[string]any `json:"extra"`
Meta MetaMap `json:"meta"`
}
func (ee *ExErr) Error() string {
@@ -36,6 +38,13 @@ func (ee *ExErr) Error() string {
// Unwrap must be implemented so that some error.XXX methods work
func (ee *ExErr) Unwrap() error {
if ee.OriginalError == nil {
if ee.WrappedErr != nil {
if werr, ok := ee.WrappedErr.(error); ok {
return werr
}
}
return nil // this is necessary - otherwise we return a wrapped nil and the `x == nil` comparison fails (= panic in errors.Is and other failures)
}
return ee.OriginalError
@@ -81,9 +90,29 @@ func (ee *ExErr) Log(evt *zerolog.Event) {
}
func (ee *ExErr) FormatLog(lvl LogPrintLevel) string {
// [LogPrintShort]
//
// - Only print message and type
// - Used e.g. for logging to the console when Build is called
// - also used in Print() if level == Warn/Info
//
// [LogPrintOverview]
//
// - print message, extra and errortrace
//
// [LogPrintFull]
//
// - print full error, with meta and extra, and trace, etc
// - Used in Output() and Print()
//
if lvl == LogPrintShort {
msg := ee.Message
if msg == "" {
msg = ee.RecursiveMessage()
}
if ee.OriginalError != nil && ee.OriginalError.Category == CatForeign {
msg = msg + " (" + strings.ReplaceAll(ee.OriginalError.Message, "\n", " ") + ")"
}
@@ -98,6 +127,10 @@ func (ee *ExErr) FormatLog(lvl LogPrintLevel) string {
str := "[" + ee.RecursiveType().Key + "] <" + ee.UniqueID + "> " + strings.ReplaceAll(ee.RecursiveMessage(), "\n", " ") + "\n"
for exk, exv := range ee.Extra {
str += fmt.Sprintf(" # [[[ %s ==> %v ]]]\n", exk, exv)
}
indent := ""
for curr := ee; curr != nil; curr = curr.OriginalError {
indent += " "
@@ -119,6 +152,10 @@ func (ee *ExErr) FormatLog(lvl LogPrintLevel) string {
str := "[" + ee.RecursiveType().Key + "] <" + ee.UniqueID + "> " + strings.ReplaceAll(ee.RecursiveMessage(), "\n", " ") + "\n"
for exk, exv := range ee.Extra {
str += fmt.Sprintf(" # [[[ %s ==> %v ]]]\n", exk, exv)
}
indent := ""
for curr := ee; curr != nil; curr = curr.OriginalError {
indent += " "
@@ -325,6 +362,14 @@ func (ee *ExErr) GetMetaTime(key string) (time.Time, bool) {
return time.Time{}, false
}
func (ee *ExErr) GetExtra(key string) (any, bool) {
if v, ok := ee.Extra[key]; ok {
return v, true
}
return nil, false
}
// contains tests whether the supplied error is contained in this error (anywhere in the chain)
func (ee *ExErr) contains(original *ExErr) (*ExErr, bool) {
if original == nil {

View File

@@ -2,10 +2,19 @@ package exerr
import (
"errors"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"os"
"testing"
)
func TestMain(m *testing.M) {
if !Initialized() {
Init(ErrorPackageConfigInit{ZeroLogErrTraces: langext.PFalse, ZeroLogAllTraces: langext.PFalse})
}
os.Exit(m.Run())
}
type golangErr struct {
Message string
}

View File

@@ -15,10 +15,10 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la
ginJson["id"] = ee.UniqueID
}
if ee.Category != CatWrap {
ginJson["category"] = ee.Category
ginJson["category"] = ee.Category.Category
}
if ee.Type != TypeWrap {
ginJson["type"] = ee.Type
ginJson["type"] = ee.Type.Key
}
if ee.StatusCode != nil {
ginJson["statuscode"] = ee.StatusCode
@@ -30,7 +30,7 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la
ginJson["caller"] = ee.Caller
}
if ee.Severity != SevErr {
ginJson["severity"] = ee.Severity
ginJson["severity"] = ee.Severity.Severity
}
if ee.Timestamp != (time.Time{}) {
ginJson["time"] = ee.Timestamp.Format(time.RFC3339)
@@ -90,6 +90,20 @@ func (ee *ExErr) ToAPIJson(applyExtendListener bool, includeWrappedErrors bool,
apiOutput["__data"] = ee.toJson(0, applyExtendListener, includeMetaFields)
}
for exkey, exval := range ee.Extra {
// ensure we do not override existing values
for {
if _, ok := apiOutput[exkey]; ok {
exkey = "_" + exkey
} else {
break
}
}
apiOutput[exkey] = exval
}
if applyExtendListener {
pkgconfig.ExtendGinOutput(ee, apiOutput)
}

View File

@@ -86,3 +86,28 @@ func MessageMatch(e error, matcher func(string) bool) bool {
return false
}
// OriginalError returns the lowest level error, probably the original/external error that was originally wrapped
func OriginalError(e error) error {
if e == nil {
return nil
}
//goland:noinspection GoTypeAssertionOnErrors
bmerr, ok := e.(*ExErr)
if !ok {
return e
}
for bmerr.OriginalError != nil {
bmerr = bmerr.OriginalError
}
if bmerr.WrappedErr != nil {
if werr, ok := bmerr.WrappedErr.(error); ok {
return werr
}
}
return bmerr
}
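A sketch of the intended behaviour; io.ErrUnexpectedEOF stands in for any external error:

    inner := io.ErrUnexpectedEOF
    wrapped := exerr.Wrap(inner, "read failed").Build()
    outer := exerr.Wrap(wrapped, "request failed").Build()

    orig := exerr.OriginalError(outer)
    // orig is the lowest-level error in the chain: the preserved foreign error
    // if WrappedErr was kept, otherwise the innermost *ExErr
    _ = orig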

View File

@@ -4,15 +4,6 @@ import (
"sync"
)
type Method string
const (
MethodOutput Method = "OUTPUT"
MethodPrint Method = "PRINT"
MethodBuild Method = "BUILD"
MethodFatal Method = "FATAL"
)
type Listener = func(method Method, v *ExErr)
var listenerLock = sync.Mutex{}

View File

@@ -3,13 +3,17 @@ package ginext
import (
"github.com/gin-gonic/gin"
"net/http"
"strings"
)
func CorsMiddleware() gin.HandlerFunc {
func CorsMiddleware(allowheader []string, exposeheader []string) gin.HandlerFunc {
return func(c *gin.Context) {
c.Writer.Header().Set("Access-Control-Allow-Origin", "*")
c.Writer.Header().Set("Access-Control-Allow-Credentials", "true")
c.Writer.Header().Set("Access-Control-Allow-Headers", "Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization, accept, origin, Cache-Control, X-Requested-With")
c.Writer.Header().Set("Access-Control-Allow-Headers", strings.Join(allowheader, ", "))
if len(exposeheader) > 0 {
c.Writer.Header().Set("Access-Control-Expose-Headers", strings.Join(exposeheader, ", "))
}
c.Writer.Header().Set("Access-Control-Allow-Methods", "OPTIONS, GET, POST, PUT, PATCH, DELETE, COUNT")
if c.Request.Method == "OPTIONS" {

View File

@@ -19,13 +19,18 @@ type GinWrapper struct {
engine *gin.Engine
suppressGinLogs bool
opt Options
allowCors bool
corsAllowHeader []string
corsExposeHeader []string
ginDebug bool
bufferBody bool
requestTimeout time.Duration
listenerBeforeRequest []func(g *gin.Context)
listenerAfterRequest []func(g *gin.Context, resp HTTPResponse)
buildRequestBindError func(g *gin.Context, fieldtype string, err error) HTTPResponse
routeSpecs []ginRouteSpec
}
@@ -37,52 +42,64 @@ type ginRouteSpec struct {
}
type Options struct {
AllowCors *bool // Add cors handler to allow all CORS requests on the default http methods
GinDebug *bool // Set gin.debug to true (adds more logs)
BufferBody *bool // Buffers the input body stream, this way the ginext error handler can later include the whole request body
Timeout *time.Duration // The default handler timeout
ListenerBeforeRequest []func(g *gin.Context) // Register listener that are called before the handler method
ListenerAfterRequest []func(g *gin.Context, resp HTTPResponse) // Register listener that are called after the handler method
AllowCors *bool // Add cors handler to allow all CORS requests on the default http methods
CorsAllowHeader *[]string // override the default values of Access-Control-Allow-Headers (AllowCors must be true)
CorsExposeHeader *[]string // return Access-Control-Expose-Headers (AllowCors must be true)
GinDebug *bool // Set gin.debug to true (adds more logs)
SuppressGinLogs *bool // Suppress our custom gin logs (even if GinDebug == true)
BufferBody *bool // Buffers the input body stream, this way the ginext error handler can later include the whole request body
Timeout *time.Duration // The default handler timeout
ListenerBeforeRequest []func(g *gin.Context) // Register listener that are called before the handler method
ListenerAfterRequest []func(g *gin.Context, resp HTTPResponse) // Register listener that are called after the handler method
DebugTrimHandlerPrefixes []string // Trim these prefixes from the handler names in the debug print
DebugReplaceHandlerNames map[string]string // Replace handler names in debug output
BuildRequestBindError func(g *gin.Context, fieldtype string, err error) HTTPResponse // Override function which generates the HTTPResponse errors that are returned by the preContext.Start() methods
}
// NewEngine creates a new (wrapped) ginEngine
func NewEngine(opt Options) *GinWrapper {
ginDebug := langext.Coalesce(opt.GinDebug, true)
if ginDebug {
gin.SetMode(gin.DebugMode)
// do not debug-print routes
gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {}
} else {
gin.SetMode(gin.ReleaseMode)
// do not debug-print routes
gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {}
}
engine := gin.New()
wrapper := &GinWrapper{
engine: engine,
suppressGinLogs: false,
opt: opt,
suppressGinLogs: langext.Coalesce(opt.SuppressGinLogs, false),
allowCors: langext.Coalesce(opt.AllowCors, false),
ginDebug: langext.Coalesce(opt.GinDebug, true),
corsAllowHeader: langext.Coalesce(opt.CorsAllowHeader, []string{"Content-Type", "Content-Length", "Accept-Encoding", "X-CSRF-Token", "Authorization", "accept", "origin", "Cache-Control", "X-Requested-With"}),
corsExposeHeader: langext.Coalesce(opt.CorsExposeHeader, []string{}),
ginDebug: ginDebug,
bufferBody: langext.Coalesce(opt.BufferBody, false),
requestTimeout: langext.Coalesce(opt.Timeout, 24*time.Hour),
listenerBeforeRequest: opt.ListenerBeforeRequest,
listenerAfterRequest: opt.ListenerAfterRequest,
buildRequestBindError: langext.Conditional(opt.BuildRequestBindError == nil, defaultBuildRequestBindError, opt.BuildRequestBindError),
}
engine.RedirectFixedPath = false
engine.RedirectTrailingSlash = false
if wrapper.allowCors {
engine.Use(CorsMiddleware())
engine.Use(CorsMiddleware(wrapper.corsAllowHeader, wrapper.corsExposeHeader))
}
// do not debug-print routes
gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {}
if !wrapper.ginDebug {
gin.SetMode(gin.ReleaseMode)
if ginDebug && !wrapper.suppressGinLogs {
ginlogger := gin.Logger()
engine.Use(func(context *gin.Context) {
if !wrapper.suppressGinLogs {
ginlogger(context)
}
})
} else {
gin.SetMode(gin.DebugMode)
engine.Use(func(context *gin.Context) { ginlogger(context) })
}
return wrapper
}
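A minimal setup sketch using the new options; the header lists and the timeout are example values:

    wrapper := ginext.NewEngine(ginext.Options{
        AllowCors:        langext.Ptr(true),
        CorsAllowHeader:  langext.Ptr([]string{"Content-Type", "Authorization"}),
        CorsExposeHeader: langext.Ptr([]string{"X-Request-Id"}),
        GinDebug:         langext.Ptr(false),
        SuppressGinLogs:  langext.Ptr(false),
        BufferBody:       langext.Ptr(true),
        Timeout:          langext.Ptr(30 * time.Second),
    })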
@@ -184,6 +201,18 @@ func (w *GinWrapper) cleanMiddlewareName(fname string) string {
}
}
for _, pfx := range w.opt.DebugTrimHandlerPrefixes {
if strings.HasPrefix(fname, pfx) {
fname = fname[len(pfx):]
}
}
for k, v := range langext.ForceMap(w.opt.DebugReplaceHandlerNames) {
if strings.EqualFold(fname, k) {
fname = v
}
}
return fname
}
@@ -193,3 +222,17 @@ func (w *GinWrapper) ServeHTTP(req *http.Request) *httptest.ResponseRecorder {
w.engine.ServeHTTP(respRec, req)
return respRec
}
// ForwardRequest manually inserts a request into this router
// = behaves as if the request came from the outside (and writes the response to `writer`)
func (w *GinWrapper) ForwardRequest(writer http.ResponseWriter, req *http.Request) {
w.engine.ServeHTTP(writer, req)
}
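A sketch of forwarding a manually built request through the wrapper; the path is illustrative:

    rec := httptest.NewRecorder()
    req := httptest.NewRequest(http.MethodGet, "/api/health", nil)
    wrapper.ForwardRequest(rec, req) // handled exactly like an external request, response lands in rec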
func (w *GinWrapper) ListRoutes() []gin.RouteInfo {
return w.engine.Routes()
}
func defaultBuildRequestBindError(g *gin.Context, fieldtype string, err error) HTTPResponse {
return Error(err)
}

9
ginext/jsonFilter.go Normal file
View File

@@ -0,0 +1,9 @@
package ginext
import "github.com/gin-gonic/gin"
var jsonFilterKey = "goext.jsonfilter"
func SetJSONFilter(g *gin.Context, filter string) {
g.Set(jsonFilterKey, filter)
}
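A sketch of how a handler could set the filter; the jsonHTTPResponse renderer in response.go reads the same key via g.GetString("goext.jsonfilter") and passes it to GoJsonRender as Filter. The handler wiring, the "public" filter name and the direct Write call are illustrative:

    func listUsers(g *gin.Context) {
        ginext.SetJSONFilter(g, "public")
        resp := ginext.JSON(http.StatusOK, map[string]any{"ok": true})
        resp.Write(g) // output is rendered with Filter == "public"
    }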

View File

@@ -15,16 +15,17 @@ import (
)
type PreContext struct {
ginCtx *gin.Context
wrapper *GinWrapper
uri any
query any
body any
rawbody *[]byte
form any
header any
timeout *time.Duration
persistantData *preContextData // must be a ptr, so that we can get the values back in our Wrap func
ginCtx *gin.Context
wrapper *GinWrapper
uri any
query any
body any
rawbody *[]byte
form any
header any
timeout *time.Duration
persistantData *preContextData // must be a ptr, so that we can get the values back in our Wrap func
ignoreWrongContentType bool
}
type preContextData struct {
@@ -71,6 +72,11 @@ func (pctx *PreContext) WithSession(sessionObj SessionObject) *PreContext {
return pctx
}
func (pctx *PreContext) IgnoreWrongContentType() *PreContext {
pctx.ignoreWrongContentType = true
return pctx
}
func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
if pctx.uri != nil {
if err := pctx.ginCtx.ShouldBindUri(pctx.uri); err != nil {
@@ -78,7 +84,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailURI).
Str("struct_type", fmt.Sprintf("%T", pctx.uri)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "URI", err))
}
}
@@ -88,7 +94,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailQuery).
Str("struct_type", fmt.Sprintf("%T", pctx.query)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "QUERY", err))
}
}
@@ -99,13 +105,15 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailJSON).
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "JSON", err))
}
} else {
err := exerr.New(exerr.TypeBindFailJSON, "missing JSON body").
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
Build()
return nil, nil, langext.Ptr(Error(err))
if !pctx.ignoreWrongContentType {
err := exerr.New(exerr.TypeBindFailJSON, "missing JSON body").
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
Build()
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "JSON", err))
}
}
}
@@ -113,14 +121,14 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
if brc, ok := pctx.ginCtx.Request.Body.(dataext.BufferedReadCloser); ok {
v, err := brc.BufferedAll()
if err != nil {
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "BODY", err))
}
*pctx.rawbody = v
} else {
buf := &bytes.Buffer{}
_, err := io.Copy(buf, pctx.ginCtx.Request.Body)
if err != nil {
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "BODY", err))
}
*pctx.rawbody = buf.Bytes()
}
@@ -133,7 +141,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailFormData).
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "FORM", err))
}
} else if pctx.ginCtx.ContentType() == "application/x-www-form-urlencoded" {
if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil {
@@ -141,13 +149,15 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailFormData).
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "FORM", err))
}
} else {
err := exerr.New(exerr.TypeBindFailFormData, "missing form body").
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(Error(err))
if !pctx.ignoreWrongContentType {
err := exerr.New(exerr.TypeBindFailFormData, "missing form body").
Str("struct_type", fmt.Sprintf("%T", pctx.form)).
Build()
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "FORM", err))
}
}
}
@@ -157,7 +167,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
WithType(exerr.TypeBindFailHeader).
Str("struct_type", fmt.Sprintf("%T", pctx.query)).
Build()
return nil, nil, langext.Ptr(Error(err))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "HEADER", err))
}
}
@@ -169,7 +179,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
err := pctx.persistantData.sessionObj.Init(pctx.ginCtx, actx)
if err != nil {
actx.Cancel()
return nil, nil, langext.Ptr(Error(exerr.Wrap(err, "Failed to init session").Build()))
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "INIT", err))
}
}

View File

@@ -1,12 +1,8 @@
package ginext
import (
"fmt"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"os"
)
type cookieval struct {
@@ -40,461 +36,10 @@ type InspectableHTTPResponse interface {
Headers() []string
}
type jsonHTTPResponse struct {
statusCode int
data any
headers []headerval
cookies []cookieval
}
type HTTPErrorResponse interface {
HTTPResponse
func (j jsonHTTPResponse) jsonRenderer(g *gin.Context) json.GoJsonRender {
var f *string
if jsonfilter := g.GetString("goext.jsonfilter"); jsonfilter != "" {
f = &jsonfilter
}
return json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true, Filter: f}
}
func (j jsonHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Render(j.statusCode, j.jsonRenderer(g))
}
func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j jsonHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j jsonHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j jsonHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j jsonHTTPResponse) BodyString(g *gin.Context) *string {
if str, err := j.jsonRenderer(g).RenderString(); err == nil {
return &str
} else {
return nil
}
}
func (j jsonHTTPResponse) ContentType() string {
return "application/json"
}
func (j jsonHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type emptyHTTPResponse struct {
statusCode int
headers []headerval
cookies []cookieval
}
func (j emptyHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Status(j.statusCode)
}
func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j emptyHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j emptyHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j emptyHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j emptyHTTPResponse) BodyString(*gin.Context) *string {
return nil
}
func (j emptyHTTPResponse) ContentType() string {
return ""
}
func (j emptyHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type textHTTPResponse struct {
statusCode int
data string
headers []headerval
cookies []cookieval
}
func (j textHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.String(j.statusCode, "%s", j.data)
}
func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j textHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j textHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j textHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j textHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(j.data)
}
func (j textHTTPResponse) ContentType() string {
return "text/plain"
}
func (j textHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type dataHTTPResponse struct {
statusCode int
data []byte
contentType string
headers []headerval
cookies []cookieval
}
func (j dataHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Data(j.statusCode, j.contentType, j.data)
}
func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j dataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j dataHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j dataHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j dataHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(string(j.data))
}
func (j dataHTTPResponse) ContentType() string {
return j.contentType
}
func (j dataHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type fileHTTPResponse struct {
mimetype string
filepath string
filename *string
headers []headerval
cookies []cookieval
}
func (j fileHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
if j.filename != nil {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.File(j.filepath)
}
func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j fileHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j fileHTTPResponse) IsSuccess() bool {
return true
}
func (j fileHTTPResponse) Statuscode() int {
return 200
}
func (j fileHTTPResponse) BodyString(*gin.Context) *string {
data, err := os.ReadFile(j.filepath)
if err != nil {
return nil
}
return langext.Ptr(string(data))
}
func (j fileHTTPResponse) ContentType() string {
return j.mimetype
}
func (j fileHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type downloadDataHTTPResponse struct {
statusCode int
mimetype string
data []byte
filename *string
headers []headerval
cookies []cookieval
}
func (j downloadDataHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
if j.filename != nil {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Data(j.statusCode, j.mimetype, j.data)
}
func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j downloadDataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j downloadDataHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j downloadDataHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j downloadDataHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(string(j.data))
}
func (j downloadDataHTTPResponse) ContentType() string {
return j.mimetype
}
func (j downloadDataHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type redirectHTTPResponse struct {
statusCode int
url string
headers []headerval
cookies []cookieval
}
func (j redirectHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Redirect(j.statusCode, j.url)
}
func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j redirectHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j redirectHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j redirectHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j redirectHTTPResponse) BodyString(*gin.Context) *string {
return nil
}
func (j redirectHTTPResponse) ContentType() string {
return ""
}
func (j redirectHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
type jsonAPIErrResponse struct {
err *exerr.ExErr
headers []headerval
cookies []cookieval
}
func (j jsonAPIErrResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
j.err.Output(g)
j.err.CallListener(exerr.MethodOutput)
}
func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j jsonAPIErrResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j jsonAPIErrResponse) IsSuccess() bool {
return false
}
func (j jsonAPIErrResponse) Statuscode() int {
return langext.Coalesce(j.err.RecursiveStatuscode(), 0)
}
func (j jsonAPIErrResponse) BodyString(*gin.Context) *string {
if str, err := j.err.ToDefaultAPIJson(); err == nil {
return &str
} else {
return nil
}
}
func (j jsonAPIErrResponse) ContentType() string {
return "application/json"
}
func (j jsonAPIErrResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func (j jsonAPIErrResponse) Unwrap() error {
return j.err
}
func Status(sc int) HTTPResponse {
return &emptyHTTPResponse{statusCode: sc}
}
func JSON(sc int, data any) HTTPResponse {
return &jsonHTTPResponse{statusCode: sc, data: data}
}
func Data(sc int, contentType string, data []byte) HTTPResponse {
return &dataHTTPResponse{statusCode: sc, contentType: contentType, data: data}
}
func Text(sc int, data string) HTTPResponse {
return &textHTTPResponse{statusCode: sc, data: data}
}
func File(mimetype string, filepath string) HTTPResponse {
return &fileHTTPResponse{mimetype: mimetype, filepath: filepath}
}
func Download(mimetype string, filepath string, filename string) HTTPResponse {
return &fileHTTPResponse{mimetype: mimetype, filepath: filepath, filename: &filename}
}
func DownloadData(status int, mimetype string, filename string, data []byte) HTTPResponse {
return &downloadDataHTTPResponse{statusCode: status, mimetype: mimetype, data: data, filename: &filename}
}
func Redirect(sc int, newURL string) HTTPResponse {
return &redirectHTTPResponse{statusCode: sc, url: newURL}
}
func Error(e error) HTTPResponse {
return &jsonAPIErrResponse{
err: exerr.FromError(e),
}
}
func ErrWrap(e error, errorType exerr.ErrorType, msg string) HTTPResponse {
return &jsonAPIErrResponse{
err: exerr.FromError(exerr.Wrap(e, msg).WithType(errorType).Build()),
}
}
func NotImplemented() HTTPResponse {

ginext/responseData.go Normal file

@@ -0,0 +1,58 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type dataHTTPResponse struct {
statusCode int
data []byte
contentType string
headers []headerval
cookies []cookieval
}
func (j dataHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Data(j.statusCode, j.contentType, j.data)
}
func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j dataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j dataHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j dataHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j dataHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(string(j.data))
}
func (j dataHTTPResponse) ContentType() string {
return j.contentType
}
func (j dataHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Data(sc int, contentType string, data []byte) HTTPResponse {
return &dataHTTPResponse{statusCode: sc, contentType: contentType, data: data}
}
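Usage sketch (aside, not part of the diff): returning pre-serialized bytes with an explicit content type through the Data constructor above. The handler name, its signature and the Cache-Control header are assumptions; only ginext.Data and WithHeader are taken from this file.
func rawJSONHandler() ginext.HTTPResponse {
    payload := []byte(`{"ok":true}`)
    // Data writes the bytes verbatim with the given status code and content type
    return ginext.Data(200, "application/json", payload).WithHeader("Cache-Control", "no-store")
}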


@@ -0,0 +1,64 @@
package ginext
import (
"fmt"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type downloadDataHTTPResponse struct {
statusCode int
mimetype string
data []byte
filename *string
headers []headerval
cookies []cookieval
}
func (j downloadDataHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
if j.filename != nil {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Data(j.statusCode, j.mimetype, j.data)
}
func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j downloadDataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j downloadDataHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j downloadDataHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j downloadDataHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(string(j.data))
}
func (j downloadDataHTTPResponse) ContentType() string {
return j.mimetype
}
func (j downloadDataHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func DownloadData(status int, mimetype string, filename string, data []byte) HTTPResponse {
return &downloadDataHTTPResponse{statusCode: status, mimetype: mimetype, data: data, filename: &filename}
}
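Usage sketch for DownloadData (the buffer content and handler name are invented): Write() above sets Content-Type and a Content-Disposition attachment header derived from the filename, so the client receives the bytes as a download.
func exportHandler() ginext.HTTPResponse {
    csv := []byte("id,name\n1,example\n")
    return ginext.DownloadData(200, "text/csv", "export.csv", csv)
}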

ginext/responseEmpty.go Normal file

@@ -0,0 +1,56 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type emptyHTTPResponse struct {
statusCode int
headers []headerval
cookies []cookieval
}
func (j emptyHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Status(j.statusCode)
}
func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j emptyHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j emptyHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j emptyHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j emptyHTTPResponse) BodyString(*gin.Context) *string {
return nil
}
func (j emptyHTTPResponse) ContentType() string {
return ""
}
func (j emptyHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Status(sc int) HTTPResponse {
return &emptyHTTPResponse{statusCode: sc}
}
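Usage sketch for Status (handler and cookie values are invented): a body-less response that still carries headers and cookies; the cookie arguments mirror WithCookie(name, value, maxAge, path, domain, secure, httpOnly).
func logoutHandler() ginext.HTTPResponse {
    // maxAge -1 deletes the cookie; secure and httpOnly are both enabled
    return ginext.Status(204).WithCookie("session", "", -1, "/", "", true, true)
}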

ginext/responseFile.go Normal file

@@ -0,0 +1,73 @@
package ginext
import (
"fmt"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"os"
)
type fileHTTPResponse struct {
mimetype string
filepath string
filename *string
headers []headerval
cookies []cookieval
}
func (j fileHTTPResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later...
if j.filename != nil {
g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename))
}
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.File(j.filepath)
}
func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j fileHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j fileHTTPResponse) IsSuccess() bool {
return true
}
func (j fileHTTPResponse) Statuscode() int {
return 200
}
func (j fileHTTPResponse) BodyString(*gin.Context) *string {
data, err := os.ReadFile(j.filepath)
if err != nil {
return nil
}
return langext.Ptr(string(data))
}
func (j fileHTTPResponse) ContentType() string {
return j.mimetype
}
func (j fileHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func File(mimetype string, filepath string) HTTPResponse {
return &fileHTTPResponse{mimetype: mimetype, filepath: filepath}
}
func Download(mimetype string, filepath string, filename string) HTTPResponse {
return &fileHTTPResponse{mimetype: mimetype, filepath: filepath, filename: &filename}
}
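Usage sketch for File and Download (the PDF path and handler are placeholders): both stream the file from disk, Download additionally sets a Content-Disposition attachment header via the filename pointer.
func reportHandler(asAttachment bool) ginext.HTTPResponse {
    if asAttachment {
        return ginext.Download("application/pdf", "/var/data/report.pdf", "report.pdf")
    }
    return ginext.File("application/pdf", "/var/data/report.pdf") // served inline, no Content-Disposition
}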

ginext/responseJson.go Normal file

@@ -0,0 +1,78 @@
package ginext
import (
"github.com/gin-gonic/gin"
json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type jsonHTTPResponse struct {
statusCode int
data any
headers []headerval
cookies []cookieval
filterOverride *string
}
func (j jsonHTTPResponse) jsonRenderer(g *gin.Context) json.GoJsonRender {
var f *string
if jsonfilter := g.GetString(jsonFilterKey); jsonfilter != "" {
f = &jsonfilter
}
if j.filterOverride != nil {
f = j.filterOverride
}
return json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true, Filter: f}
}
func (j jsonHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Render(j.statusCode, j.jsonRenderer(g))
}
func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j jsonHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j jsonHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j jsonHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j jsonHTTPResponse) BodyString(g *gin.Context) *string {
if str, err := j.jsonRenderer(g).RenderString(); err == nil {
return &str
} else {
return nil
}
}
func (j jsonHTTPResponse) ContentType() string {
return "application/json"
}
func (j jsonHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func JSON(sc int, data any) HTTPResponse {
return &jsonHTTPResponse{statusCode: sc, data: data}
}
func JSONWithFilter(sc int, data any, f string) HTTPResponse {
return &jsonHTTPResponse{statusCode: sc, data: data, filterOverride: &f}
}
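Usage sketch for JSON and JSONWithFilter (the DTO is invented): the filterOverride set by JSONWithFilter takes precedence over a filter installed via the WithJSONFilter middleware, and jsonfilter tags decide which fields are emitted (see the matchesJSONFilter rules in the gojson diff further down).
type userDTO struct {
    ID    string `json:"id"`
    Email string `json:"email" jsonfilter:"admin"` // only emitted when the active filter is "admin"
}

func userHandler() ginext.HTTPResponse {
    u := userDTO{ID: "1", Email: "u@example.invalid"}
    return ginext.JSONWithFilter(200, u, "admin")
}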

ginext/responseJsonAPI.go Normal file

@@ -0,0 +1,81 @@
package ginext
import (
"context"
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type jsonAPIErrResponse struct {
err *exerr.ExErr
headers []headerval
cookies []cookieval
}
func (j jsonAPIErrResponse) Error() error {
return j.err
}
func (j jsonAPIErrResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
exerr.Get(j.err).Output(context.Background(), g)
j.err.CallListener(exerr.MethodOutput)
}
func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j jsonAPIErrResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j jsonAPIErrResponse) IsSuccess() bool {
return false
}
func (j jsonAPIErrResponse) Statuscode() int {
return langext.Coalesce(j.err.RecursiveStatuscode(), 0)
}
func (j jsonAPIErrResponse) BodyString(*gin.Context) *string {
if str, err := j.err.ToDefaultAPIJson(); err == nil {
return &str
} else {
return nil
}
}
func (j jsonAPIErrResponse) ContentType() string {
return "application/json"
}
func (j jsonAPIErrResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func (j jsonAPIErrResponse) Unwrap() error {
return j.err
}
func Error(e error) HTTPResponse {
return &jsonAPIErrResponse{
err: exerr.FromError(e),
}
}
func ErrWrap(e error, errorType exerr.ErrorType, msg string) HTTPResponse {
return &jsonAPIErrResponse{
err: exerr.FromError(exerr.Wrap(e, msg).WithType(errorType).Build()),
}
}
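Usage sketch for Error and ErrWrap (loadUser and the exerr.TypeInternal constant are assumptions; any exerr.ErrorType works): ErrWrap builds an exerr from a plain error, and Write() above renders it through the exerr output machinery and notifies the registered listeners.
func getUserHandler() ginext.HTTPResponse {
    user, err := loadUser() // hypothetical data-access helper
    if err != nil {
        return ginext.ErrWrap(err, exerr.TypeInternal, "failed to load user")
    }
    return ginext.JSON(200, user)
}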


@@ -0,0 +1,57 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type redirectHTTPResponse struct {
statusCode int
url string
headers []headerval
cookies []cookieval
}
func (j redirectHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.Redirect(j.statusCode, j.url)
}
func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j redirectHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j redirectHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j redirectHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j redirectHTTPResponse) BodyString(*gin.Context) *string {
return nil
}
func (j redirectHTTPResponse) ContentType() string {
return ""
}
func (j redirectHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Redirect(sc int, newURL string) HTTPResponse {
return &redirectHTTPResponse{statusCode: sc, url: newURL}
}
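Usage sketch for Redirect (URL and handler are placeholders); note that IsSuccess() above treats 3xx codes as success.
func legacyHandler() ginext.HTTPResponse {
    return ginext.Redirect(302, "https://example.invalid/new-location") // 302 = temporary redirect
}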


@@ -0,0 +1,72 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"io"
"net/http"
"time"
)
type seekableResponse struct {
data io.ReadSeeker
contentType string
filename string
headers []headerval
cookies []cookieval
}
func (j seekableResponse) Write(g *gin.Context) {
g.Header("Content-Type", j.contentType) // if we don't set it here http.ServeContent does weird sniffing later...
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
http.ServeContent(g.Writer, g.Request, j.filename, time.Unix(0, 0), j.data)
if clsr, ok := j.data.(io.ReadSeekCloser); ok {
err := clsr.Close()
if err != nil {
exerr.Wrap(err, "failed to close io.ReadSeerkClose in ginext.Seekable").Str("filename", j.filename).Print()
}
}
}
func (j seekableResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j seekableResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j seekableResponse) IsSuccess() bool {
return true
}
func (j seekableResponse) Statuscode() int {
return 200
}
func (j seekableResponse) BodyString(*gin.Context) *string {
return langext.Ptr("(seekable)")
}
func (j seekableResponse) ContentType() string {
return j.contentType
}
func (j seekableResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Seekable(filename string, contentType string, data io.ReadSeeker) HTTPResponse {
return &seekableResponse{filename: filename, contentType: contentType, data: data}
}
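Usage sketch for Seekable (the mp4 path is invented and an os import is assumed): *os.File satisfies io.ReadSeeker, and Write() above serves range requests via http.ServeContent and closes the file afterwards because it also implements io.ReadSeekCloser.
func videoHandler() ginext.HTTPResponse {
    f, err := os.Open("/var/data/clip.mp4")
    if err != nil {
        return ginext.Error(err)
    }
    return ginext.Seekable("clip.mp4", "video/mp4", f)
}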

ginext/responseText.go Normal file

@@ -0,0 +1,57 @@
package ginext
import (
"github.com/gin-gonic/gin"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type textHTTPResponse struct {
statusCode int
data string
headers []headerval
cookies []cookieval
}
func (j textHTTPResponse) Write(g *gin.Context) {
for _, v := range j.headers {
g.Header(v.Key, v.Val)
}
for _, v := range j.cookies {
g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly)
}
g.String(j.statusCode, "%s", j.data)
}
func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse {
j.headers = append(j.headers, headerval{k, v})
return j
}
func (j textHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse {
j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly})
return j
}
func (j textHTTPResponse) IsSuccess() bool {
return j.statusCode >= 200 && j.statusCode <= 399
}
func (j textHTTPResponse) Statuscode() int {
return j.statusCode
}
func (j textHTTPResponse) BodyString(*gin.Context) *string {
return langext.Ptr(j.data)
}
func (j textHTTPResponse) ContentType() string {
return "text/plain"
}
func (j textHTTPResponse) Headers() []string {
return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val })
}
func Text(sc int, data string) HTTPResponse {
return &textHTTPResponse{statusCode: sc, data: data}
}
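Usage sketch for Text (handler name invented): the body is rendered through g.String and reported as text/plain.
func healthHandler() ginext.HTTPResponse {
    return ginext.Text(200, "ok")
}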


@@ -57,7 +57,7 @@ func (w *GinRoutesWrapper) Use(middleware ...gin.HandlerFunc) *GinRoutesWrapper
}
func (w *GinRoutesWrapper) WithJSONFilter(filter string) *GinRoutesWrapper {
return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) })
return w.Use(func(g *gin.Context) { g.Set(jsonFilterKey, filter) })
}
func (w *GinRoutesWrapper) GET(relativePath string) *GinRouteBuilder {
@@ -112,7 +112,7 @@ func (w *GinRouteBuilder) Use(middleware ...gin.HandlerFunc) *GinRouteBuilder {
}
func (w *GinRouteBuilder) WithJSONFilter(filter string) *GinRouteBuilder {
return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) })
return w.Use(func(g *gin.Context) { g.Set(jsonFilterKey, filter) })
}
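Usage sketch for the json-filter middleware change above (the router variable and handler are assumptions; the GET/WithJSONFilter/Handle chain follows the methods shown in this file): every response produced by the handler is marshalled with the "admin" filter unless it overrides it via JSONWithFilter.
router.GET("/api/users/:id").WithJSONFilter("admin").Handle(getUserHandler)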
func (w *GinRouteBuilder) Handle(handler WHandlerFunc) {

go.mod

@@ -1,54 +1,61 @@
module gogs.mikescher.com/BlackForestBytes/goext
go 1.21
go 1.23
require (
github.com/gin-gonic/gin v1.9.1
github.com/gin-gonic/gin v1.10.0
github.com/glebarez/go-sqlite v1.22.0 // only needed for tests -.-
github.com/jmoiron/sqlx v1.3.5
github.com/rs/xid v1.5.0
github.com/rs/zerolog v1.32.0
go.mongodb.org/mongo-driver v1.14.0
golang.org/x/crypto v0.20.0
golang.org/x/sys v0.17.0
golang.org/x/term v0.17.0
github.com/jmoiron/sqlx v1.4.0
github.com/rs/xid v1.6.0
github.com/rs/zerolog v1.33.0
go.mongodb.org/mongo-driver v1.17.1
golang.org/x/crypto v0.28.0
golang.org/x/sys v0.26.0
golang.org/x/term v0.25.0
)
require (
github.com/bytedance/sonic v1.11.2 // indirect
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
github.com/chenzhuoyu/iasm v0.9.1 // indirect
github.com/disintegration/imaging v1.6.2
github.com/jung-kurt/gofpdf v1.16.2
golang.org/x/sync v0.8.0
)
require (
github.com/bytedance/sonic v1.12.3 // indirect
github.com/bytedance/sonic/loader v0.2.0 // indirect
github.com/cloudwego/base64x v0.1.4 // indirect
github.com/cloudwego/iasm v0.2.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
github.com/gabriel-vasile/mimetype v1.4.5 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.19.0 // indirect
github.com/goccy/go-json v0.10.2 // indirect
github.com/go-playground/validator/v10 v10.22.1 // indirect
github.com/goccy/go-json v0.10.3 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/uuid v1.5.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.17.7 // indirect
github.com/klauspost/cpuid/v2 v2.2.7 // indirect
github.com/klauspost/compress v1.17.10 // indirect
github.com/klauspost/cpuid/v2 v2.2.8 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/montanaflynn/stats v0.7.1 // indirect
github.com/pelletier/go-toml/v2 v2.1.1 // indirect
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.12 // indirect
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect
golang.org/x/arch v0.7.0 // indirect
golang.org/x/net v0.21.0 // indirect
golang.org/x/sync v0.6.0 // indirect
golang.org/x/text v0.14.0 // indirect
google.golang.org/protobuf v1.32.0 // indirect
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
golang.org/x/arch v0.11.0 // indirect
golang.org/x/image v0.21.0 // indirect
golang.org/x/net v0.30.0 // indirect
golang.org/x/text v0.19.0 // indirect
google.golang.org/protobuf v1.34.2 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
modernc.org/libc v1.37.6 // indirect
modernc.org/mathutil v1.6.0 // indirect

go.sum

@@ -1,32 +1,29 @@
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE=
github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.11.0 h1:FwNNv6Vu4z2Onf1++LNzxB/QhitD8wuTdpZzMTGITWo=
github.com/bytedance/sonic v1.11.0/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.11.1 h1:JC0+6c9FoWYYxakaoa+c5QTtJeiSZNeByOBhXtAFSn4=
github.com/bytedance/sonic v1.11.1/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.11.2 h1:ywfwo0a/3j9HR8wsYGWsIWl2mvRsI950HyoxiBERw5A=
github.com/bytedance/sonic v1.11.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0=
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA=
github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0=
github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
github.com/bytedance/sonic v1.12.3 h1:W2MGa7RCU1QTeYRTPE3+88mVC0yXmsRQRChiyVocVjU=
github.com/bytedance/sonic v1.12.3/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.2.0 h1:zNprn+lsIP06C/IqCHs3gPQIvnvpKbbxyXQP1iU4kWM=
github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
github.com/gabriel-vasile/mimetype v1.4.5 h1:J7wGKdGu33ocBOhGy0z653k/lFKLFDPJMG8Gql0kxn4=
github.com/gabriel-vasile/mimetype v1.4.5/go.mod h1:ibHel+/kbxn9x2407k1izTA1S81ku1z/DlgOW2QE0M4=
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ=
github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
@@ -35,95 +32,80 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE=
github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74=
github.com/go-playground/validator/v10 v10.17.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-playground/validator/v10 v10.18.0 h1:BvolUXjp4zuvkZ5YN5t7ebzbhlUtPsPm2S9NAZ5nl9U=
github.com/go-playground/validator/v10 v10.18.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4=
github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA=
github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ=
github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo=
github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU=
github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4=
github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI=
github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg=
github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes=
github.com/jung-kurt/gofpdf v1.16.2 h1:jgbatWHfRlPYiK85qgevsZTHviWXKwB1TTiKdz5PtRc=
github.com/jung-kurt/gofpdf v1.16.2/go.mod h1:1hl7y57EsiPAkLbOwzpzqgx1A30nQCk/YmFV8S2vmK0=
github.com/klauspost/compress v1.17.10 h1:oXAz+Vh0PMUvJczoi+flxpnBEPxoER1IaAnU/NMPtT0=
github.com/klauspost/compress v1.17.10/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc=
github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM=
github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI=
github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI=
github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
github.com/phpdave11/gofpdi v1.0.7/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc=
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A=
github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0=
github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU=
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8=
github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
@@ -134,49 +116,33 @@ github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY=
github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4=
github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk=
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4=
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk=
go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo=
go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80=
go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc=
golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
go.mongodb.org/mongo-driver v1.17.1 h1:Wic5cJIwJgSpBhe3lx3+/RybR5PiYRMpVFgO7cOHyIM=
go.mongodb.org/mongo-driver v1.17.1/go.mod h1:wwWm/+BuOddhcq3n68LKRmgk2wXzmF6s0SFOa0GINL4=
golang.org/x/arch v0.11.0 h1:KXV8WWKCXm6tRpLirl2szsO5j/oOODwZf4hATmGVNs4=
golang.org/x/arch v0.11.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k=
golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.20.0 h1:jmAMJJZXr5KiCw05dfYK9QnqaqKLYXijU23lsEdcQqg=
golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ=
golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw=
golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U=
golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.21.0 h1:c5qV36ajHpdj4Qi0GnE0jUc/yuo33OLFaa0d+crTD5s=
golang.org/x/image v0.21.0/go.mod h1:vUbsLavqK/W303ZroQQVKQ+Af3Yl6Uz1Ppu5J/cLz78=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c=
golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo=
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4=
golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -184,33 +150,24 @@ golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo=
golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE=
golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY=
golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24=
golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM=
golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU=
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
@@ -225,4 +182,3 @@ modernc.org/memory v1.7.2/go.mod h1:NO4NVCQy0N7ln+T9ngWqOQfi7ley4vpwvARR+Hjw95E=
modernc.org/sqlite v1.28.0 h1:Zx+LyDDmXczNnEQdvPuEfcFVA2ZPyaD7UCZDjef3BHQ=
modernc.org/sqlite v1.28.0/go.mod h1:Qxpazz0zH8Z1xCFyi5GSL3FzbtZ3fvbjmywNogldEW0=
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=


@@ -1,5 +1,5 @@
package goext
const GoextVersion = "0.0.397"
const GoextVersion = "0.0.525"
const GoextVersionTimestamp = "2024-03-04T12:17:10+0100"
const GoextVersionTimestamp = "2024-10-05T02:45:20+0200"


@@ -217,6 +217,7 @@ type decodeState struct {
savedError error
useNumber bool
disallowUnknownFields bool
tagkey *string
}
// readIndex returns the position of the last byte read.
@@ -652,7 +653,11 @@ func (d *decodeState) object(v reflect.Value) error {
v.Set(reflect.MakeMap(t))
}
case reflect.Struct:
fields = cachedTypeFields(t)
tagkey := "json"
if d.tagkey != nil {
tagkey = *d.tagkey
}
fields = cachedTypeFields(t, tagkey)
// ok
default:
d.saveError(&UnmarshalTypeError{Value: "object", Type: t, Offset: int64(d.off)})


@@ -382,7 +382,12 @@ func isEmptyValue(v reflect.Value) bool {
}
func (e *encodeState) reflectValue(v reflect.Value, opts encOpts) {
valueEncoder(v)(e, v, opts)
tagkey := "json"
if opts.tagkey != nil {
tagkey = *opts.tagkey
}
valueEncoder(v, tagkey)(e, v, opts)
}
type encOpts struct {
@@ -397,20 +402,22 @@ type encOpts struct {
// filter is matched against the jsonfilter tag of struct fields;
// a field is marshalled if it has no jsonfilter tag, or if its jsonfilter tag contains the filter value
filter *string
// tagkey selects a different struct tag key to use instead of "json"
tagkey *string
}
type encoderFunc func(e *encodeState, v reflect.Value, opts encOpts)
var encoderCache sync.Map // map[reflect.Type]encoderFunc
func valueEncoder(v reflect.Value) encoderFunc {
func valueEncoder(v reflect.Value, tagkey string) encoderFunc {
if !v.IsValid() {
return invalidValueEncoder
}
return typeEncoder(v.Type())
return typeEncoder(v.Type(), tagkey)
}
func typeEncoder(t reflect.Type) encoderFunc {
func typeEncoder(t reflect.Type, tagkey string) encoderFunc {
if fi, ok := encoderCache.Load(t); ok {
return fi.(encoderFunc)
}
@@ -433,7 +440,7 @@ func typeEncoder(t reflect.Type) encoderFunc {
}
// Compute the real encoder and replace the indirect func with it.
f = newTypeEncoder(t, true)
f = newTypeEncoder(t, true, tagkey)
wg.Done()
encoderCache.Store(t, f)
return f
@@ -446,19 +453,19 @@ var (
// newTypeEncoder constructs an encoderFunc for a type.
// The returned encoder only checks CanAddr when allowAddr is true.
func newTypeEncoder(t reflect.Type, allowAddr bool) encoderFunc {
func newTypeEncoder(t reflect.Type, allowAddr bool, tagkey string) encoderFunc {
// If we have a non-pointer value whose type implements
// Marshaler with a value receiver, then we're better off taking
// the address of the value - otherwise we end up with an
// allocation as we cast the value to an interface.
if t.Kind() != reflect.Pointer && allowAddr && reflect.PointerTo(t).Implements(marshalerType) {
return newCondAddrEncoder(addrMarshalerEncoder, newTypeEncoder(t, false))
return newCondAddrEncoder(addrMarshalerEncoder, newTypeEncoder(t, false, tagkey))
}
if t.Implements(marshalerType) {
return marshalerEncoder
}
if t.Kind() != reflect.Pointer && allowAddr && reflect.PointerTo(t).Implements(textMarshalerType) {
return newCondAddrEncoder(addrTextMarshalerEncoder, newTypeEncoder(t, false))
return newCondAddrEncoder(addrTextMarshalerEncoder, newTypeEncoder(t, false, tagkey))
}
if t.Implements(textMarshalerType) {
return textMarshalerEncoder
@@ -480,15 +487,15 @@ func newTypeEncoder(t reflect.Type, allowAddr bool) encoderFunc {
case reflect.Interface:
return interfaceEncoder
case reflect.Struct:
return newStructEncoder(t)
return newStructEncoder(t, tagkey)
case reflect.Map:
return newMapEncoder(t)
return newMapEncoder(t, tagkey)
case reflect.Slice:
return newSliceEncoder(t)
return newSliceEncoder(t, tagkey)
case reflect.Array:
return newArrayEncoder(t)
return newArrayEncoder(t, tagkey)
case reflect.Pointer:
return newPtrEncoder(t)
return newPtrEncoder(t, tagkey)
default:
return unsupportedTypeEncoder
}
@@ -781,7 +788,7 @@ FieldLoop:
if f.omitEmpty && isEmptyValue(fv) {
continue
} else if opts.filter != nil && len(f.jsonfilter) > 0 && !f.jsonfilter.Contains(*opts.filter) {
} else if !matchesJSONFilter(f.jsonfilter, opts.filter) {
continue
}
e.WriteByte(next)
@@ -801,8 +808,32 @@ FieldLoop:
}
}
func newStructEncoder(t reflect.Type) encoderFunc {
se := structEncoder{fields: cachedTypeFields(t)}
func matchesJSONFilter(filter jsonfilter, value *string) bool {
if len(filter) == 0 {
return true // the field has no jsonfilter tag
}
if value == nil || *value == "" {
return false // the field has a jsonfilter tag, but no filter value is active
}
if len(filter) == 1 && filter[0] == "-" {
return false
}
if filter.Contains(*value) {
return true
}
if filter.Contains("*") {
return true
}
return false
}
func newStructEncoder(t reflect.Type, tagkey string) encoderFunc {
se := structEncoder{fields: cachedTypeFields(t, tagkey)}
return se.encode
}
@@ -855,7 +886,7 @@ func (me mapEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
e.ptrLevel--
}
func newMapEncoder(t reflect.Type) encoderFunc {
func newMapEncoder(t reflect.Type, tagkey string) encoderFunc {
switch t.Key().Kind() {
case reflect.String,
reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
@@ -865,7 +896,7 @@ func newMapEncoder(t reflect.Type) encoderFunc {
return unsupportedTypeEncoder
}
}
me := mapEncoder{typeEncoder(t.Elem())}
me := mapEncoder{typeEncoder(t.Elem(), tagkey)}
return me.encode
}
@@ -936,7 +967,7 @@ func (se sliceEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
e.ptrLevel--
}
func newSliceEncoder(t reflect.Type) encoderFunc {
func newSliceEncoder(t reflect.Type, tagkey string) encoderFunc {
// Byte slices get special treatment; arrays don't.
if t.Elem().Kind() == reflect.Uint8 {
p := reflect.PointerTo(t.Elem())
@@ -944,7 +975,7 @@ func newSliceEncoder(t reflect.Type) encoderFunc {
return encodeByteSlice
}
}
enc := sliceEncoder{newArrayEncoder(t)}
enc := sliceEncoder{newArrayEncoder(t, tagkey)}
return enc.encode
}
@@ -964,8 +995,8 @@ func (ae arrayEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
e.WriteByte(']')
}
func newArrayEncoder(t reflect.Type) encoderFunc {
enc := arrayEncoder{typeEncoder(t.Elem())}
func newArrayEncoder(t reflect.Type, tagkey string) encoderFunc {
enc := arrayEncoder{typeEncoder(t.Elem(), tagkey)}
return enc.encode
}
@@ -992,8 +1023,8 @@ func (pe ptrEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) {
e.ptrLevel--
}
func newPtrEncoder(t reflect.Type) encoderFunc {
enc := ptrEncoder{typeEncoder(t.Elem())}
func newPtrEncoder(t reflect.Type, tagkey string) encoderFunc {
enc := ptrEncoder{typeEncoder(t.Elem(), tagkey)}
return enc.encode
}
@@ -1270,7 +1301,7 @@ func (x byIndex) Less(i, j int) bool {
// typeFields returns a list of fields that JSON should recognize for the given type.
// The algorithm is breadth-first search over the set of structs to include - the top struct
// and then any reachable anonymous structs.
func typeFields(t reflect.Type) structFields {
func typeFields(t reflect.Type, tagkey string) structFields {
// Anonymous fields to explore at the current level and the next.
current := []field{}
next := []field{{typ: t}}
@@ -1315,7 +1346,7 @@ func typeFields(t reflect.Type) structFields {
// Ignore unexported non-embedded fields.
continue
}
tag := sf.Tag.Get("json")
tag := sf.Tag.Get(tagkey)
if tag == "-" {
continue
}
@@ -1326,7 +1357,7 @@ func typeFields(t reflect.Type) structFields {
var jsonfilter []string
jsonfilterTag := sf.Tag.Get("jsonfilter")
if jsonfilterTag != "" && jsonfilterTag != "-" {
if jsonfilterTag != "" {
jsonfilter = strings.Split(jsonfilterTag, ",")
}
@@ -1449,7 +1480,7 @@ func typeFields(t reflect.Type) structFields {
for i := range fields {
f := &fields[i]
f.encoder = typeEncoder(typeByIndex(t, f.index))
f.encoder = typeEncoder(typeByIndex(t, f.index), tagkey)
}
nameIndex := make(map[string]int, len(fields))
for i, field := range fields {
@@ -1474,13 +1505,26 @@ func dominantField(fields []field) (field, bool) {
return fields[0], true
}
var fieldCache sync.Map // map[reflect.Type]structFields
var fieldCache sync.Map // map[string]map[reflect.Type]structFields
// cachedTypeFields is like typeFields but uses a cache to avoid repeated work.
func cachedTypeFields(t reflect.Type) structFields {
if f, ok := fieldCache.Load(t); ok {
func cachedTypeFields(t reflect.Type, tagkey string) structFields {
if m0, ok := fieldCache.Load(tagkey); ok {
if f, ok := m0.(*sync.Map).Load(t); ok {
return f.(structFields)
}
f, _ := m0.(*sync.Map).LoadOrStore(t, typeFields(t, tagkey))
return f.(structFields)
} else {
m0 := &sync.Map{}
f, _ := m0.LoadOrStore(t, typeFields(t, tagkey))
fieldCache.Store(tagkey, m0)
return f.(structFields)
}
f, _ := fieldCache.LoadOrStore(t, typeFields(t))
return f.(structFields)
}
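Illustration of the matchesJSONFilter rules added above, on an invented DTO; each comment restates the branch that applies.
type profileDTO struct {
    Name    string `json:"name"`                     // no jsonfilter tag          -> always marshalled
    Email   string `json:"email" jsonfilter:"admin"` // marshalled only when the active filter is "admin"
    Debug   string `json:"debug" jsonfilter:"-"`     // never marshalled
    Created string `json:"created" jsonfilter:"*"`   // marshalled for any non-empty active filter
}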


@@ -41,6 +41,9 @@ func (dec *Decoder) UseNumber() { dec.d.useNumber = true }
// non-ignored, exported fields in the destination.
func (dec *Decoder) DisallowUnknownFields() { dec.d.disallowUnknownFields = true }
// TagKey sets a different struct tag key to be used instead of "json"
func (dec *Decoder) TagKey(v string) { dec.d.tagkey = &v }
// Decode reads the next JSON-encoded value from its
// input and stores it in the value pointed to by v.
//
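Usage sketch for the new Decoder.TagKey option (the "db" tag key and the record struct are invented for illustration): fields are resolved through the configured tag instead of "json".
import (
    "strings"

    json "gogs.mikescher.com/BlackForestBytes/goext/gojson"
)

type record struct {
    Name string `db:"name"`
}

func decodeWithDBTag(raw string) (record, error) {
    dec := json.NewDecoder(strings.NewReader(raw))
    dec.TagKey("db") // e.g. raw = `{"name":"goext"}` fills Name via the db tag
    var r record
    err := dec.Decode(&r)
    return r, err
}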

imageext/enums.go Normal file

@@ -0,0 +1,3 @@
package imageext
//go:generate go run ../_gen/enum-generate.go -- enums_gen.go

imageext/enums_gen.go Normal file

@@ -0,0 +1,216 @@
// Code generated by enum-generate.go DO NOT EDIT.
package imageext
import "gogs.mikescher.com/BlackForestBytes/goext/langext"
import "gogs.mikescher.com/BlackForestBytes/goext/enums"
const ChecksumEnumGenerator = "1da5383c33ee442fd0b899369053f66bdc85bed2dbf906949d3edfeedfe13340" // GoExtVersion: 0.0.449
// ================================ ImageFit ================================
//
// File: image.go
// StringEnum: true
// DescrEnum: false
// DataEnum: false
//
var __ImageFitValues = []ImageFit{
ImageFitStretch,
ImageFitCover,
ImageFitContainCenter,
ImageFitContainTopLeft,
ImageFitContainTopRight,
ImageFitContainBottomLeft,
ImageFitContainBottomRight,
}
var __ImageFitVarnames = map[ImageFit]string{
ImageFitStretch: "ImageFitStretch",
ImageFitCover: "ImageFitCover",
ImageFitContainCenter: "ImageFitContainCenter",
ImageFitContainTopLeft: "ImageFitContainTopLeft",
ImageFitContainTopRight: "ImageFitContainTopRight",
ImageFitContainBottomLeft: "ImageFitContainBottomLeft",
ImageFitContainBottomRight: "ImageFitContainBottomRight",
}
func (e ImageFit) Valid() bool {
return langext.InArray(e, __ImageFitValues)
}
func (e ImageFit) Values() []ImageFit {
return __ImageFitValues
}
func (e ImageFit) ValuesAny() []any {
return langext.ArrCastToAny(__ImageFitValues)
}
func (e ImageFit) ValuesMeta() []enums.EnumMetaValue {
return ImageFitValuesMeta()
}
func (e ImageFit) String() string {
return string(e)
}
func (e ImageFit) VarName() string {
if d, ok := __ImageFitVarnames[e]; ok {
return d
}
return ""
}
func (e ImageFit) TypeName() string {
return "ImageFit"
}
func (e ImageFit) PackageName() string {
return "media"
}
func (e ImageFit) Meta() enums.EnumMetaValue {
return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil}
}
func ParseImageFit(vv string) (ImageFit, bool) {
for _, ev := range __ImageFitValues {
if string(ev) == vv {
return ev, true
}
}
return "", false
}
func ImageFitValues() []ImageFit {
return __ImageFitValues
}
func ImageFitValuesMeta() []enums.EnumMetaValue {
return []enums.EnumMetaValue{
ImageFitStretch.Meta(),
ImageFitCover.Meta(),
ImageFitContainCenter.Meta(),
ImageFitContainTopLeft.Meta(),
ImageFitContainTopRight.Meta(),
ImageFitContainBottomLeft.Meta(),
ImageFitContainBottomRight.Meta(),
}
}
// ================================ ImageCompresson ================================
//
// File: image.go
// StringEnum: true
// DescrEnum: false
// DataEnum: false
//
var __ImageCompressonValues = []ImageCompresson{
CompressionPNGNone,
CompressionPNGSpeed,
CompressionPNGBest,
CompressionJPEG100,
CompressionJPEG90,
CompressionJPEG80,
CompressionJPEG70,
CompressionJPEG60,
CompressionJPEG50,
CompressionJPEG25,
CompressionJPEG10,
CompressionJPEG1,
}
var __ImageCompressonVarnames = map[ImageCompresson]string{
CompressionPNGNone: "CompressionPNGNone",
CompressionPNGSpeed: "CompressionPNGSpeed",
CompressionPNGBest: "CompressionPNGBest",
CompressionJPEG100: "CompressionJPEG100",
CompressionJPEG90: "CompressionJPEG90",
CompressionJPEG80: "CompressionJPEG80",
CompressionJPEG70: "CompressionJPEG70",
CompressionJPEG60: "CompressionJPEG60",
CompressionJPEG50: "CompressionJPEG50",
CompressionJPEG25: "CompressionJPEG25",
CompressionJPEG10: "CompressionJPEG10",
CompressionJPEG1: "CompressionJPEG1",
}
func (e ImageCompresson) Valid() bool {
return langext.InArray(e, __ImageCompressonValues)
}
func (e ImageCompresson) Values() []ImageCompresson {
return __ImageCompressonValues
}
func (e ImageCompresson) ValuesAny() []any {
return langext.ArrCastToAny(__ImageCompressonValues)
}
func (e ImageCompresson) ValuesMeta() []enums.EnumMetaValue {
return ImageCompressonValuesMeta()
}
func (e ImageCompresson) String() string {
return string(e)
}
func (e ImageCompresson) VarName() string {
if d, ok := __ImageCompressonVarnames[e]; ok {
return d
}
return ""
}
func (e ImageCompresson) TypeName() string {
return "ImageCompresson"
}
func (e ImageCompresson) PackageName() string {
return "media"
}
func (e ImageCompresson) Meta() enums.EnumMetaValue {
return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil}
}
func ParseImageCompresson(vv string) (ImageCompresson, bool) {
for _, ev := range __ImageCompressonValues {
if string(ev) == vv {
return ev, true
}
}
return "", false
}
func ImageCompressonValues() []ImageCompresson {
return __ImageCompressonValues
}
func ImageCompressonValuesMeta() []enums.EnumMetaValue {
return []enums.EnumMetaValue{
CompressionPNGNone.Meta(),
CompressionPNGSpeed.Meta(),
CompressionPNGBest.Meta(),
CompressionJPEG100.Meta(),
CompressionJPEG90.Meta(),
CompressionJPEG80.Meta(),
CompressionJPEG70.Meta(),
CompressionJPEG60.Meta(),
CompressionJPEG50.Meta(),
CompressionJPEG25.Meta(),
CompressionJPEG10.Meta(),
CompressionJPEG1.Meta(),
}
}
// ================================ ================= ================================
func AllPackageEnums() []enums.Enum {
return []enums.Enum{
ImageFitStretch, // ImageFit
CompressionPNGNone, // ImageCompresson
}
}

321
imageext/image.go Normal file
View File

@@ -0,0 +1,321 @@
package imageext
import (
"bytes"
"fmt"
"github.com/disintegration/imaging"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/mathext"
"image"
"image/color"
"image/draw"
"image/jpeg"
"image/png"
"io"
"math"
)
type ImageFit string //@enum:type
const (
ImageFitStretch ImageFit = "STRETCH"
ImageFitCover ImageFit = "COVER"
ImageFitContainCenter ImageFit = "CONTAIN_CENTER"
ImageFitContainTopLeft ImageFit = "CONTAIN_TOPLEFT"
ImageFitContainTopRight ImageFit = "CONTAIN_TOPRIGHT"
ImageFitContainBottomLeft ImageFit = "CONTAIN_BOTTOMLEFT"
ImageFitContainBottomRight ImageFit = "CONTAIN_BOTTOMRIGHT"
)
type ImageCrop struct { // all crop values are percentages!
CropX float64 `bson:"cropX" json:"cropX"`
CropY float64 `bson:"cropY" json:"cropY"`
CropWidth float64 `bson:"cropWidth" json:"cropWidth"`
CropHeight float64 `bson:"cropHeight" json:"cropHeight"`
}
type ImageCompresson string //@enum:type
const (
CompressionPNGNone ImageCompresson = "PNG_NONE"
CompressionPNGSpeed ImageCompresson = "PNG_SPEED"
CompressionPNGBest ImageCompresson = "PNG_BEST"
CompressionJPEG100 ImageCompresson = "JPEG_100"
CompressionJPEG90 ImageCompresson = "JPEG_090"
CompressionJPEG80 ImageCompresson = "JPEG_080"
CompressionJPEG70 ImageCompresson = "JPEG_070"
CompressionJPEG60 ImageCompresson = "JPEG_060"
CompressionJPEG50 ImageCompresson = "JPEG_050"
CompressionJPEG25 ImageCompresson = "JPEG_025"
CompressionJPEG10 ImageCompresson = "JPEG_010"
CompressionJPEG1 ImageCompresson = "JPEG_001"
)
func CropImage(img image.Image, px float64, py float64, pw float64, ph float64) (image.Image, error) {
type subImager interface {
SubImage(r image.Rectangle) image.Image
}
x := int(float64(img.Bounds().Dx()) * px)
y := int(float64(img.Bounds().Dy()) * py)
w := int(float64(img.Bounds().Dx()) * pw)
h := int(float64(img.Bounds().Dy()) * ph)
if simg, ok := img.(subImager); ok {
return simg.SubImage(image.Rect(x, y, x+w, y+h)), nil
} else {
bfr1 := bytes.Buffer{}
err := png.Encode(&bfr1, img)
if err != nil {
return nil, exerr.Wrap(err, "").Build()
}
imgPNG, err := png.Decode(&bfr1)
if err != nil {
return nil, exerr.Wrap(err, "").Build()
}
return imgPNG.(subImager).SubImage(image.Rect(x, y, x+w, y+h)), nil
}
}
func EncodeImage(img image.Image, compression ImageCompresson) (bytes.Buffer, string, error) {
var err error
bfr := bytes.Buffer{}
switch compression {
case CompressionPNGNone:
enc := &png.Encoder{CompressionLevel: png.NoCompression}
err = enc.Encode(&bfr, img)
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/png", nil
case CompressionPNGSpeed:
enc := &png.Encoder{CompressionLevel: png.BestSpeed}
err = enc.Encode(&bfr, img)
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/png", nil
case CompressionPNGBest:
enc := &png.Encoder{CompressionLevel: png.BestCompression}
err = enc.Encode(&bfr, img)
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/png", nil
case CompressionJPEG100:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 100})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG90:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 90})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG80:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 80})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG70:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 70})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG60:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 60})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG50:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 50})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG25:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 25})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG10:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 10})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
case CompressionJPEG1:
err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 1})
if err != nil {
return bytes.Buffer{}, "", exerr.Wrap(err, "").Build()
}
return bfr, "image/jpeg", nil
default:
return bytes.Buffer{}, "", exerr.New(exerr.TypeInternal, "unknown compression method: "+compression.String()).Build()
}
}
func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fillColor color.Color) (image.Image, PercentageRectangle, error) {
iw := img.Bounds().Size().X
ih := img.Bounds().Size().Y
// [iw, ih] is the size of the image
// [bbw, bbh] is the target bounding box,
// - it specifies the target ratio
// - and the maximal target resolution
facW := float64(iw) / bbw
facH := float64(ih) / bbh
// facW is the ratio between iw and bbw
// - it is the factor by which the bounding box must be multiplied to reach the image size (in the x-axis)
//
// (same is true for facH, but for the height and y-axis)
if fit == ImageFitCover {
// image-fit:cover completely fills the target-bounding-box, it potentially cuts parts of the image away
// we use the smaller (!) value of facW and facH, because we want to have the smallest possible destination rect (due to file size)
// and because the image is made to completely fill the bounding-box, the smaller factor (= the dimension the image is stretched more) is relevant
// but we cap `fac` at 1 (it can be larger than 1)
// a value >1 would mean the final image resolution is bigger than the bounding box, which we do not want.
// if the initial image (iw, ih) is already bigger than the bounding box (bbw, bbh), facW and facH are always >1 and fac will be 1
// which means we will simply use the bounding box as destination rect (and scale the image down)
fac := mathext.Clamp(mathext.Min(facW, facH), 0.0, 1.0)
// we scale the bounding box by fac (both dimension the same amount, to keep the bounding-box ratio)
w := int(math.Round(bbw * fac))
h := int(math.Round(bbh * fac))
img = imaging.Fill(img, w, h, imaging.Center, imaging.Lanczos)
newImg := image.NewRGBA(image.Rect(0, 0, w, h))
draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
draw.Draw(newImg, newImg.Bounds(), img, image.Pt(0, 0), draw.Over)
return newImg, PercentageRectangle{0, 0, 1, 1}, nil
}
if fit == ImageFitContainCenter || fit == ImageFitContainTopLeft || fit == ImageFitContainTopRight || fit == ImageFitContainBottomLeft || fit == ImageFitContainBottomRight {
// image-fit:contain fits the whole image into the target-bounding-box; there is potentially empty space, but no part of the image is cut away
// we use the bigger (!) value of facW and facH,
// because the image is made to fit the bounding-box, the bigger factor (= the dimension the image is stretched less) is relevant
// but we cap `fac` at 1 (can be larger than 1)
// a value >1 would mean the final image resolution is bigger than the bounding box, which we do not want.
// if the initial image (iw, ih) is already bigger than the bounding box (bbw, bbh), facW and facH are always >1 and fac will be 1
// which means we will simply use the bounding box as destination rect (and scale the image down)
facOut := mathext.Clamp(mathext.Max(facW, facH), 0.0, 1.0)
// we scale the bounding box by fac (both dimension the same amount, to keep the bounding-box ratio)
// [ow|oh] ==> size of output image (same ratio as bounding box [bbw|bbh])
ow := int(math.Round(bbw * facOut))
oh := int(math.Round(bbh * facOut))
facScale := mathext.Min(float64(ow)/float64(iw), float64(oh)/float64(ih))
// [dw|dh] ==> size of destination rect (where to draw source in output image) (same ratio as input image [iw|ih])
dw := int(math.Round(float64(iw) * facScale))
dh := int(math.Round(float64(ih) * facScale))
img = imaging.Resize(img, dw, dh, imaging.Lanczos)
var destBounds image.Rectangle
if fit == ImageFitContainCenter {
destBounds = image.Rect((ow-dw)/2, (oh-dh)/2, (ow-dw)/2+dw, (oh-dh)/2+dh)
} else if fit == ImageFitContainTopLeft {
destBounds = image.Rect(0, 0, dw, dh)
} else if fit == ImageFitContainTopRight {
destBounds = image.Rect(ow-dw, 0, ow, dh)
} else if fit == ImageFitContainBottomLeft {
destBounds = image.Rect(0, oh-dh, dw, oh)
} else if fit == ImageFitContainBottomRight {
destBounds = image.Rect(ow-dw, oh-dh, ow, oh)
}
newImg := image.NewRGBA(image.Rect(0, 0, ow, oh))
draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
draw.Draw(newImg, destBounds, img, image.Pt(0, 0), draw.Over)
return newImg, calcRelativeRect(destBounds, newImg.Bounds()), nil
}
if fit == ImageFitStretch {
// image-fit:stretch simply stretches the image to the bounding box
// we use the bigger value of [facW;facH], to (potentially) scale the bounding box down before applying it
// theoretically we could directly use [bbw, bbh] in the call to imaging.Resize,
// but if the image is (a lot) smaller than the bounding box it is useful to scale it down to reduce the final PDF file size
// we also cap fac at 1, because we never want the final rect to be bigger than the inputted bounding box (see comments at start of method)
fac := mathext.Clamp(mathext.Max(facW, facH), 0.0, 1.0)
// we scale the bounding box by fac (both dimension the same amount, to keep the bounding-box ratio)
w := int(math.Round(bbw * fac))
h := int(math.Round(bbh * fac))
img = imaging.Resize(img, w, h, imaging.Lanczos)
newImg := image.NewRGBA(image.Rect(0, 0, w, h))
draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
draw.Draw(newImg, newImg.Bounds(), img, image.Pt(0, 0), draw.Over)
return newImg, PercentageRectangle{0, 0, 1, 1}, nil
}
return nil, PercentageRectangle{}, exerr.New(exerr.TypeInternal, fmt.Sprintf("unknown image-fit: '%s'", fit)).Build()
}
func VerifyAndDecodeImage(data io.Reader, mime string) (image.Image, error) {
if mime == "image/jpeg" {
img, err := jpeg.Decode(data)
if err != nil {
return nil, exerr.Wrap(err, "failed to decode blob as jpeg").WithType(exerr.TypeInvalidImage).Build()
}
return img, nil
}
if mime == "image/png" {
img, err := png.Decode(data)
if err != nil {
return nil, exerr.Wrap(err, "failed to decode blob as png").WithType(exerr.TypeInvalidImage).Build()
}
return img, nil
}
return nil, exerr.New(exerr.TypeInvalidMimeType, fmt.Sprintf("unknown/invalid image mimetype: '%s'", mime)).Build()
}
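
A minimal usage sketch tying the helpers above together - decode an upload, fit it into an 800x600 box with contain-center, and re-encode it as JPEG. The input file and the package path (derived from the file listing) are assumptions:

package main

import (
    "fmt"
    "image/color"
    "os"

    "gogs.mikescher.com/BlackForestBytes/goext/imageext"
)

func main() {
    f, err := os.Open("input.jpg") // hypothetical input file
    if err != nil {
        panic(err)
    }
    defer f.Close()

    img, err := imageext.VerifyAndDecodeImage(f, "image/jpeg")
    if err != nil {
        panic(err)
    }

    // Fit into an 800x600 bounding box, padding with white where the image does not cover the box.
    fitted, rect, err := imageext.ObjectFitImage(img, 800, 600, imageext.ImageFitContainCenter, color.White)
    if err != nil {
        panic(err)
    }

    buf, mime, err := imageext.EncodeImage(fitted, imageext.CompressionJPEG90)
    if err != nil {
        panic(err)
    }

    fmt.Println(mime, buf.Len(), rect) // rect describes where the source image ended up inside the output
}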

35
imageext/types.go Normal file
View File

@@ -0,0 +1,35 @@
package imageext
import "image"
type Rectangle struct {
X float64
Y float64
W float64
H float64
}
type PercentageRectangle struct {
X float64 // [0..1]
Y float64 // [0..1]
W float64 // [0..1]
H float64 // [0..1]
}
func (r PercentageRectangle) Of(ref Rectangle) Rectangle {
return Rectangle{
X: ref.X + r.X*ref.W,
Y: ref.Y + r.Y*ref.H,
W: r.W * ref.W,
H: r.H * ref.H,
}
}
func calcRelativeRect(inner image.Rectangle, outer image.Rectangle) PercentageRectangle {
return PercentageRectangle{
X: float64(inner.Min.X-outer.Min.X) / float64(outer.Dx()),
Y: float64(inner.Min.Y-outer.Min.Y) / float64(outer.Dy()),
W: float64(inner.Dx()) / float64(outer.Dx()),
H: float64(inner.Dy()) / float64(outer.Dy()),
}
}
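
A short sketch of how a PercentageRectangle (as returned by ObjectFitImage above) maps back into absolute coordinates via Of; the values are invented for illustration:

package main

import (
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/imageext"
)

func main() {
    // The source image occupies the middle half of the output (25% inset on every side).
    rel := imageext.PercentageRectangle{X: 0.25, Y: 0.25, W: 0.5, H: 0.5}

    // Project it onto a concrete 800x600 canvas.
    abs := rel.Of(imageext.Rectangle{X: 0, Y: 0, W: 800, H: 600})

    fmt.Printf("%+v\n", abs) // {X:200 Y:150 W:400 H:300}
}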

View File

@@ -59,6 +59,18 @@ func ArrUnique[T comparable](array []T) []T {
return result
}
func ArrUniqueStable[T comparable](array []T) []T {
hist := make(map[T]bool, len(array))
result := make([]T, 0, len(array))
for _, v := range array {
if _, ok := hist[v]; !ok {
hist[v] = true
result = append(result, v)
}
}
return result
}
func ArrEqualsExact[T comparable](arr1 []T, arr2 []T) bool {
if len(arr1) != len(arr2) {
return false
@@ -311,6 +323,16 @@ func ArrMap[T1 any, T2 any](arr []T1, conv func(v T1) T2) []T2 {
return r
}
func ArrDeRef[T1 any](arr []*T1) []T1 {
r := make([]T1, 0, len(arr))
for _, v := range arr {
if v != nil {
r = append(r, *v)
}
}
return r
}
func MapMap[TK comparable, TV any, TR any](inmap map[TK]TV, conv func(k TK, v TV) TR) []TR {
r := make([]TR, 0, len(inmap))
for k, v := range inmap {
@@ -453,6 +475,26 @@ func ArrConcat[T any](arr ...[]T) []T {
return r
}
// ArrAppend works similar to append(x, y, z) - but does not touch the old array and instead creates a new one
func ArrAppend[T any](arr []T, add ...T) []T {
r := ArrCopy(arr)
for _, v := range add {
r = append(r, v)
}
return r
}
// ArrPrepend works similar to append(x, y, z) - but does not touch the old array and instead creates a new one
// Also - in contrast to ArrAppend - the add values are inserted at the start of the resulting array (in reverse order)
func ArrPrepend[T any](arr []T, add ...T) []T {
out := make([]T, len(arr)+len(add))
copy(out[len(add):], arr)
for i := 0; i < len(add); i++ {
out[len(add)-i-1] = add[i]
}
return out
}
// ArrCopy does a shallow copy of the 'in' array
func ArrCopy[T any](in []T) []T {
out := make([]T, len(in))
@@ -468,6 +510,10 @@ func ArrRemove[T comparable](arr []T, needle T) []T {
return arr
}
// ArrRemoveAt returns arr with the element at index idx removed.
// Note that (like append) it reuses - and therefore modifies - the backing array of the input slice.
func ArrRemoveAt[T any](arr []T, idx int) []T {
return append(arr[:idx], arr[idx+1:]...)
}
func ArrExcept[T comparable](arr []T, needles ...T) []T {
r := make([]T, 0, len(arr))
rmlist := ArrToSet(needles)
@@ -528,3 +574,18 @@ func ArrChunk[T any](arr []T, chunkSize int) [][]T {
return res
}
func ArrGroupBy[T1 any, T2 comparable](arr []T1, groupfunc func(v T1) T2) map[T2][]T1 {
r := make(map[T2][]T1)
for _, v := range arr {
key := groupfunc(v)
if _, ok := r[key]; ok {
r[key] = append(r[key], v)
} else {
r[key] = []T1{v}
}
}
return r
}
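
A small sketch of the new ArrGroupBy helper (the example data is invented); each key maps to its values in their original order:

package main

import (
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
    words := []string{"apple", "avocado", "banana", "blueberry", "cherry"}

    // Group the words by their first letter.
    groups := langext.ArrGroupBy(words, func(v string) byte { return v[0] })

    fmt.Println(groups['a']) // [apple avocado]
    fmt.Println(groups['b']) // [banana blueberry]
    fmt.Println(groups['c']) // [cherry]
}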

View File

@@ -2,6 +2,7 @@ package langext
import (
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"strings"
"testing"
)
@@ -10,3 +11,13 @@ func TestJoinString(t *testing.T) {
res := JoinString(ids, ",")
tst.AssertEqual(t, res, "1,2,3")
}
func TestArrPrepend(t *testing.T) {
v1 := []string{"1", "2", "3"}
v2 := ArrPrepend(v1, "4", "5", "6")
tst.AssertEqual(t, strings.Join(v1, ""), "123")
tst.AssertEqual(t, strings.Join(v2, ""), "654123")
}

View File

@@ -5,12 +5,84 @@ import (
"time"
)
func Coalesce[T any](v *T, def T) T {
if v == nil {
return def
} else {
return *v
func Coalesce[T any](v1 *T, def T) T {
if v1 != nil {
return *v1
}
return def
}
func CoalesceOpt[T any](v1 *T, v2 *T) *T {
if v1 != nil {
return v1
}
return v2
}
func Coalesce3[T any](v1 *T, v2 *T, def T) T {
if v1 != nil {
return *v1
}
if v2 != nil {
return *v2
}
return def
}
func Coalesce3Opt[T any](v1 *T, v2 *T, v3 *T) *T {
if v1 != nil {
return v1
}
if v2 != nil {
return v2
}
return v3
}
func Coalesce4[T any](v1 *T, v2 *T, v3 *T, def T) T {
if v1 != nil {
return *v1
}
if v2 != nil {
return *v2
}
if v3 != nil {
return *v3
}
return def
}
func Coalesce4Opt[T any](v1 *T, v2 *T, v3 *T, v4 *T) *T {
if v1 != nil {
return v1
}
if v2 != nil {
return v2
}
if v3 != nil {
return v3
}
return v4
}
func CoalesceDblPtr[T any](v1 **T, v2 *T) *T {
if v1 != nil {
return *v1
}
return v2
}
func CoalesceString(s *string, def string) string {

15
langext/io.go Normal file
View File

@@ -0,0 +1,15 @@
package langext
import "io"
type nopCloser struct {
io.Writer
}
func (n nopCloser) Close() error {
return nil // no op
}
func WriteNopCloser(w io.Writer) io.WriteCloser {
return nopCloser{w}
}
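
A brief sketch of where WriteNopCloser helps - handing a plain writer to an API that insists on an io.WriteCloser. The writeReport function is invented for illustration:

package main

import (
    "bytes"
    "fmt"
    "io"

    "gogs.mikescher.com/BlackForestBytes/goext/langext"
)

// writeReport stands in for any function that takes ownership of an io.WriteCloser.
func writeReport(w io.WriteCloser) error {
    defer w.Close() // safe even for writers that have no real Close
    _, err := io.WriteString(w, "hello")
    return err
}

func main() {
    var buf bytes.Buffer
    if err := writeReport(langext.WriteNopCloser(&buf)); err != nil {
        panic(err)
    }
    fmt.Println(buf.String()) // hello
}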

View File

@@ -63,3 +63,51 @@ func PatchRemJson[JV string | []byte](rawjson JV, key string) (JV, error) {
return JV(newjson), nil
}
func MarshalJsonOrPanic(v any) string {
bin, err := json.Marshal(v)
if err != nil {
panic(err)
}
return string(bin)
}
func MarshalJsonOrDefault(v any, def string) string {
bin, err := json.Marshal(v)
if err != nil {
return def
}
return string(bin)
}
func MarshalJsonOrNil(v any) *string {
bin, err := json.Marshal(v)
if err != nil {
return nil
}
return Ptr(string(bin))
}
func MarshalJsonIndentOrPanic(v any, prefix, indent string) string {
bin, err := json.MarshalIndent(v, prefix, indent)
if err != nil {
panic(err)
}
return string(bin)
}
func MarshalJsonIndentOrDefault(v any, prefix, indent string, def string) string {
bin, err := json.MarshalIndent(v, prefix, indent)
if err != nil {
return def
}
return string(bin)
}
func MarshalJsonIndentOrNil(v any, prefix, indent string) *string {
bin, err := json.MarshalIndent(v, prefix, indent)
if err != nil {
return nil
}
return Ptr(string(bin))
}
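
These helpers are mainly useful in logging/formatting paths where a marshal error should not abort the caller. A short sketch, assuming the helpers live in langext alongside the neighbouring files:

package main

import (
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
    payload := map[string]any{"id": 42, "name": "example"}

    // Best-effort serialization for a log line: fall back to a placeholder instead of propagating an error.
    fmt.Println(langext.MarshalJsonOrDefault(payload, "<unserializable>"))

    // Pretty-printed variant that panics on failure (acceptable for static, known-good values).
    fmt.Println(langext.MarshalJsonIndentOrPanic(payload, "", "  "))
}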

View File

@@ -66,7 +66,7 @@ func CopyMap[K comparable, V any](a map[K]V) map[K]V {
func ForceMap[K comparable, V any](v map[K]V) map[K]V {
if v == nil {
return make(map[K]V, 0)
return make(map[K]V)
} else {
return v
}

19
langext/object.go Normal file
View File

@@ -0,0 +1,19 @@
package langext
import "encoding/json"
func DeepCopyByJson[T any](v T) (T, error) {
bin, err := json.Marshal(v)
if err != nil {
return *new(T), err
}
var result T
err = json.Unmarshal(bin, &result)
if err != nil {
return *new(T), err
}
return result, nil
}
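
A short usage sketch; because the copy goes through encoding/json, only exported fields that survive a JSON round-trip are duplicated:

package main

import (
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/langext"
)

type Settings struct {
    Name string
    Tags []string
}

func main() {
    orig := Settings{Name: "prod", Tags: []string{"a", "b"}}

    copied, err := langext.DeepCopyByJson(orig)
    if err != nil {
        panic(err)
    }

    copied.Tags[0] = "changed" // does not affect orig - the slice was deep-copied
    fmt.Println(orig.Tags[0], copied.Tags[0]) // a changed
}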

View File

@@ -22,11 +22,22 @@ func DblPtr[T any](v T) **T {
return &v_
}
func DblPtrIfNotNil[T any](v *T) **T {
if v == nil {
return nil
}
return &v
}
func DblPtrNil[T any]() **T {
var v *T = nil
return &v
}
func ArrPtr[T any](v ...T) *[]T {
return &v
}
func PtrInt32(v int32) *int32 {
return &v
}

View File

@@ -8,12 +8,28 @@ func Sort[T OrderedConstraint](arr []T) {
})
}
func AsSorted[T OrderedConstraint](arr []T) []T {
arr = ArrCopy(arr)
sort.Slice(arr, func(i1, i2 int) bool {
return arr[i1] < arr[i2]
})
return arr
}
func SortStable[T OrderedConstraint](arr []T) {
sort.SliceStable(arr, func(i1, i2 int) bool {
return arr[i1] < arr[i2]
})
}
func AsSortedStable[T OrderedConstraint](arr []T) []T {
arr = ArrCopy(arr)
sort.SliceStable(arr, func(i1, i2 int) bool {
return arr[i1] < arr[i2]
})
return arr
}
func IsSorted[T OrderedConstraint](arr []T) bool {
return sort.SliceIsSorted(arr, func(i1, i2 int) bool {
return arr[i1] < arr[i2]
@@ -26,12 +42,28 @@ func SortSlice[T any](arr []T, less func(v1, v2 T) bool) {
})
}
func AsSortedSlice[T any](arr []T, less func(v1, v2 T) bool) []T {
arr = ArrCopy(arr)
sort.Slice(arr, func(i1, i2 int) bool {
return less(arr[i1], arr[i2])
})
return arr
}
func SortSliceStable[T any](arr []T, less func(v1, v2 T) bool) {
sort.SliceStable(arr, func(i1, i2 int) bool {
return less(arr[i1], arr[i2])
})
}
func AsSortedSliceStable[T any](arr []T, less func(v1, v2 T) bool) []T {
arr = ArrCopy(arr)
sort.SliceStable(arr, func(i1, i2 int) bool {
return less(arr[i1], arr[i2])
})
return arr
}
func IsSliceSorted[T any](arr []T, less func(v1, v2 T) bool) bool {
return sort.SliceIsSorted(arr, func(i1, i2 int) bool {
return less(arr[i1], arr[i2])
@@ -44,12 +76,28 @@ func SortBy[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TEle
})
}
func AsSortedBy[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) []TElem {
arr = ArrCopy(arr)
sort.Slice(arr, func(i1, i2 int) bool {
return selector(arr[i1]) < selector(arr[i2])
})
return arr
}
func SortByStable[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) {
sort.SliceStable(arr, func(i1, i2 int) bool {
return selector(arr[i1]) < selector(arr[i2])
})
}
func AsSortedByStable[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) []TElem {
arr = ArrCopy(arr)
sort.SliceStable(arr, func(i1, i2 int) bool {
return selector(arr[i1]) < selector(arr[i2])
})
return arr
}
func IsSortedBy[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) {
sort.SliceStable(arr, func(i1, i2 int) bool {
return selector(arr[i1]) < selector(arr[i2])

View File

@@ -88,12 +88,15 @@ func StrRunePadRight(str string, pad string, padlen int) string {
func Indent(str string, pad string) string {
eonl := strings.HasSuffix(str, "\n")
if eonl {
str = str[0 : len(str)-1]
}
r := ""
for _, v := range strings.Split(str, "\n") {
r += pad + v + "\n"
}
if eonl {
if !eonl {
r = r[0 : len(r)-1]
}
@@ -115,3 +118,21 @@ func StrRepeat(val string, count int) string {
}
return r
}
func StrWrap(val string, linelen int, seperator string) string {
res := ""
for iPos := 0; ; {
next := min(iPos+linelen, len(val))
res += val[iPos:next]
iPos = next
if iPos >= len(val) {
break
}
res += seperator
}
return res
}

152
langext/string_test.go Normal file
View File

@@ -0,0 +1,152 @@
package langext
import "testing"
func TestStrLimitBehaviour(t *testing.T) {
val := "Hello, World!"
maxlen := 5
suffix := "..."
expected := "He..."
result := StrLimit(val, maxlen, suffix)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrSplitBehaviour1(t *testing.T) {
val := "Hello,World,,"
sep := ","
expected := []string{"Hello", "World"}
result := StrSplit(val, sep, false)
if len(result) != len(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrSplitBehaviour2(t *testing.T) {
val := "Hello,World,,"
sep := ","
expected := []string{"Hello", "World", "", ""}
result := StrSplit(val, sep, true)
if len(result) != len(expected) {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrPadRightBehaviour(t *testing.T) {
str := "Hello"
pad := "*"
padlen := 10
expected := "Hello*****"
result := StrPadRight(str, pad, padlen)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrPadLeftBehaviour(t *testing.T) {
str := "Hello"
pad := "*"
padlen := 10
expected := "*****Hello"
result := StrPadLeft(str, pad, padlen)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrRunePadLeftBehaviour(t *testing.T) {
str := "Hello"
pad := "*"
padlen := 10
expected := "*****Hello"
result := StrRunePadLeft(str, pad, padlen)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrRunePadRightBehaviour(t *testing.T) {
str := "Hello"
pad := "*"
padlen := 10
expected := "Hello*****"
result := StrRunePadRight(str, pad, padlen)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestIndentBehaviour1(t *testing.T) {
str := "Hello\nWorld"
pad := ".."
expected := "..Hello\n..World"
result := Indent(str, pad)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestIndentBehaviour2(t *testing.T) {
str := "Hello\nWorld\n"
pad := ".."
expected := "..Hello\n..World\n"
result := Indent(str, pad)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrRepeatBehaviour(t *testing.T) {
val := "Hello"
count := 3
expected := "HelloHelloHello"
result := StrRepeat(val, count)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrWrapBehaviour1(t *testing.T) {
val := "123456789"
linelen := 5
seperator := "\n"
expected := "12345\n6789"
result := StrWrap(val, linelen, seperator)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrWrapBehaviour2(t *testing.T) {
val := "1234567890"
linelen := 5
seperator := "\n"
expected := "12345\n67890"
result := StrWrap(val, linelen, seperator)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrWrapBehaviour3(t *testing.T) {
val := "****************"
linelen := 4
seperator := "\n"
expected := "****\n****\n****\n****"
result := StrWrap(val, linelen, seperator)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}
func TestStrWrapBehaviour4(t *testing.T) {
val := "*****************"
linelen := 4
seperator := "\n"
expected := "****\n****\n****\n****\n*"
result := StrWrap(val, linelen, seperator)
if result != expected {
t.Errorf("Expected %v but got %v", expected, result)
}
}

View File

@@ -1,6 +1,9 @@
package mathext
import "gogs.mikescher.com/BlackForestBytes/goext/langext"
import (
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
func Sum[T langext.NumberConstraint](v []T) T {
total := T(0)
@@ -41,3 +44,53 @@ func ArrMax[T langext.OrderedConstraint](v []T) T {
}
return r
}
func MustPercentile[T langext.NumberConstraint](rawdata []T, percentile float64) T {
v, err := Percentile(rawdata, percentile)
if err != nil {
panic(err)
}
return v
}
func Percentile[T langext.NumberConstraint](rawdata []T, percentile float64) (T, error) {
v, err := FloatPercentile(rawdata, percentile)
if err != nil {
return T(0), err
}
return T(v), nil
}
func FloatPercentile[T langext.NumberConstraint](rawdata []T, percentile float64) (float64, error) {
if len(rawdata) == 0 {
return 0, exerr.New(exerr.TypeAssert, "no data to calculate percentile").Any("percentile", percentile).Build()
}
if percentile < 0 || percentile > 100 {
return 0, exerr.New(exerr.TypeAssert, "percentile out of range").Any("percentile", percentile).Build()
}
data := langext.ArrCopy(rawdata)
langext.Sort(data)
idxFloat := float64(len(data)-1) * (percentile / float64(100))
idxInt := int(idxFloat)
// exact match on index
if idxFloat == float64(idxInt) {
return float64(data[idxInt]), nil
}
// linear interpolation
v1 := data[idxInt]
v2 := data[idxInt+1]
weight := idxFloat - float64(idxInt)
valFloat := (float64(v1) * (1 - weight)) + (float64(v2) * weight)
return valFloat, nil
}
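
As a concrete check of the interpolation above (mirroring the test data further down): for the data [1, 3, 5, 7, 9] and percentile 40, the index is (5-1) * 0.40 = 1.6, so the result interpolates between data[1] = 3 and data[2] = 5 with weight 0.6, giving 3*0.4 + 5*0.6 = 4.2.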

238
mathext/statistics_test.go Normal file
View File

@@ -0,0 +1,238 @@
package mathext
import (
"math"
"testing"
)
func TestSumIntsHappyPath(t *testing.T) {
values := []int{1, 2, 3, 4, 5}
expected := 15
result := Sum(values)
if result != expected {
t.Errorf("Sum of %v; expected %v, got %v", values, expected, result)
}
}
func TestSumFloatsHappyPath(t *testing.T) {
values := []float64{1.1, 2.2, 3.3}
expected := 6.6
result := Sum(values)
if result != expected {
t.Errorf("Sum of %v; expected %v, got %v", values, expected, result)
}
}
func TestMeanOfInts(t *testing.T) {
values := []float64{1, 2, 3, 4, 5}
expected := 3.0
result := Mean(values)
if result != expected {
t.Errorf("Mean of %v; expected %v, got %v", values, expected, result)
}
}
func TestMedianOddNumberOfElements(t *testing.T) {
values := []float64{1, 2, 3, 4, 5}
expected := 3.0
result := Median(values)
if result != expected {
t.Errorf("Median of %v; expected %v, got %v", values, expected, result)
}
}
func TestMedianEvenNumberOfElements(t *testing.T) {
values := []float64{1, 2, 3, 4, 5, 6}
expected := 3.5
result := Median(values)
if result != expected {
t.Errorf("Median of %v; expected %v, got %v", values, expected, result)
}
}
func TestArrMinInts(t *testing.T) {
values := []int{5, 3, 9, 1, 4}
expected := 1
result := ArrMin(values)
if result != expected {
t.Errorf("ArrMin of %v; expected %v, got %v", values, expected, result)
}
}
func TestArrMaxInts(t *testing.T) {
values := []int{5, 3, 9, 1, 4}
expected := 9
result := ArrMax(values)
if result != expected {
t.Errorf("ArrMax of %v; expected %v, got %v", values, expected, result)
}
}
func TestPercentileValidInput(t *testing.T) {
values := []int{1, 2, 3, 4, 5}
percentile := 50.0
expected := 3
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileOutOfRange(t *testing.T) {
values := []int{1, 2, 3, 4, 5}
percentile := 150.0
_, err := Percentile(values, percentile)
if err == nil {
t.Errorf("Expected error for percentile %v out of range, got nil", percentile)
}
}
func TestPercentileValueInArray(t *testing.T) {
values := []int{1, 3, 5, 7, 9}
percentile := 40.0
expected := 4
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileValueInArray(t *testing.T) {
values := []int{1, 3, 5, 7, 9}
percentile := 40.0
expected := 4.2
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileInterpolation(t *testing.T) {
values := []float64{1.0, 2.0, 3.0, 4.0, 5.0}
percentile := 25.0
expected := 2.0
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileSingleValue(t *testing.T) {
values := []int{10}
percentile := 50.0
expected := 10
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileExactlyBetweenTwoValues(t *testing.T) {
values := []float64{1, 2, 3, 4, 5}
percentile := 62.5 // Exactly between 3 and 4
expected := 3.5
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileTwoThirdsBetweenTwoValues(t *testing.T) {
values := []float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
percentile := 66.666666666666
expected := 6.666666666666667 // Since 2/3 of the way between 6 and 7 is 6.666...
result, err := Percentile(values, percentile)
if err != nil || math.Abs(result-expected) > 1e-9 {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileBetweenTwoValues1(t *testing.T) {
values := []float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
percentile := 11.0
expected := 1.1
result, err := Percentile(values, percentile)
if err != nil || math.Abs(result-expected) > 1e-9 {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileBetweenTwoValues2(t *testing.T) {
values := []float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
percentile := 9.0
expected := 0.9
result, err := Percentile(values, percentile)
if err != nil || math.Abs(result-expected) > 1e-9 {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileUnsortedInput(t *testing.T) {
values := []float64{5, 1, 4, 2, 3} // Unsorted input
percentile := 50.0
expected := 3.0
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileUnsortedInputLowPercentile(t *testing.T) {
values := []float64{10, 6, 7, 3, 2, 9, 8, 1, 4, 5} // Unsorted input
percentile := 10.0
expected := 1.9 // Expecting interpolation between 1 and 2
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestPercentileUnsortedInputHighPercentile(t *testing.T) {
values := []float64{10, 6, 7, 3, 2, 9, 8, 1, 4, 5} // Unsorted input
percentile := 90.0
expected := 9.1 // Expecting interpolation between 9 and 10
result, err := Percentile(values, percentile)
if err != nil || result != expected {
t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileExactValueFromInput(t *testing.T) {
values := []float64{1.5, 2.5, 3.5, 4.5, 5.5}
percentile := 50.0 // Exact value from input array should be 3.5
expected := 3.5
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileInterpolatedValue(t *testing.T) {
values := []float64{1.0, 2.0, 3.0, 4.0, 5.0}
percentile := 87.5 // Interpolated value between 4.0 and 5.0
expected := 4.5
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileUnsortedInputExactValue(t *testing.T) {
values := []float64{5.5, 1.5, 4.5, 2.5, 3.5} // Unsorted input
percentile := 50.0
expected := 3.5
result, err := FloatPercentile(values, percentile)
if err != nil || result != expected {
t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}
func TestFloatPercentileUnsortedInputInterpolatedValue(t *testing.T) {
values := []float64{10.5, 6.5, 7.5, 3.5, 2.5, 9.5, 8.5, 1.5, 4.5, 5.5}
percentile := 80.0 // Interpolated value between 8.5 and 9.5
expected := 8.7
result, err := FloatPercentile(values, percentile)
if err != nil || math.Abs(result-expected) > 1e-9 {
t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err)
}
}

View File

@@ -5,6 +5,7 @@ import (
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsontype"
"go.mongodb.org/mongo-driver/bson/primitive"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rfctime"
"reflect"
@@ -34,6 +35,15 @@ func CreateGoExtBsonRegistry() *bsoncodec.Registry {
rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.SecondsF64(0)), rfctime.SecondsF64(0))
rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(rfctime.SecondsF64(0))), rfctime.SecondsF64(0))
rb.RegisterTypeDecoder(reflect.TypeOf(exerr.ErrorCategory{}), exerr.ErrorCategory{})
rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(exerr.ErrorCategory{})), exerr.ErrorCategory{})
rb.RegisterTypeDecoder(reflect.TypeOf(exerr.ErrorSeverity{}), exerr.ErrorSeverity{})
rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(exerr.ErrorSeverity{})), exerr.ErrorSeverity{})
rb.RegisterTypeDecoder(reflect.TypeOf(exerr.ErrorType{}), exerr.ErrorType{})
rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(exerr.ErrorType{})), exerr.ErrorType{})
bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb)
bsoncodec.DefaultValueDecoders{}.RegisterDefaultDecoders(rb)

View File

@@ -1,13 +1,14 @@
package pagination
import (
"context"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
)
type MongoFilter interface {
FilterQuery() mongo.Pipeline
Sort() bson.D
FilterQuery(ctx context.Context) mongo.Pipeline
Sort(ctx context.Context) bson.D
}
type dynamicFilter struct {
@@ -15,11 +16,11 @@ type dynamicFilter struct {
sort bson.D
}
func (d dynamicFilter) FilterQuery() mongo.Pipeline {
func (d dynamicFilter) FilterQuery(ctx context.Context) mongo.Pipeline {
return d.pipeline
}
func (d dynamicFilter) Sort() bson.D {
func (d dynamicFilter) Sort(ctx context.Context) bson.D {
return d.sort
}

View File

@@ -1,19 +1,45 @@
package reflectext
import "reflect"
import (
"encoding/json"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
)
func ConvertStructToMap(v any) any {
return reflectToMap(reflect.ValueOf(v))
type ConvertStructToMapOpt struct {
KeepJsonMarshalTypes bool
MaxDepth *int
}
func reflectToMap(fv reflect.Value) any {
func ConvertStructToMap(v any, opts ...ConvertStructToMapOpt) map[string]any {
opt := ConvertStructToMapOpt{}
if len(opts) > 0 {
opt = opts[0]
}
res := reflectToMap(reflect.ValueOf(v), 1, opt)
if v, ok := res.(map[string]any); ok {
return v
} else if langext.IsNil(res) {
return nil
} else {
panic("not an object")
}
}
func reflectToMap(fv reflect.Value, depth int, opt ConvertStructToMapOpt) any {
if opt.MaxDepth != nil && depth > *opt.MaxDepth {
return fv.Interface()
}
if fv.Kind() == reflect.Ptr {
if fv.IsNil() {
return nil
} else {
return reflectToMap(fv.Elem())
return reflectToMap(fv.Elem(), depth, opt)
}
}
@@ -30,7 +56,7 @@ func reflectToMap(fv reflect.Value) any {
arrlen := fv.Len()
arr := make([]any, arrlen)
for i := 0; i < arrlen; i++ {
arr[i] = reflectToMap(fv.Index(i))
arr[i] = reflectToMap(fv.Index(i), depth+1, opt)
}
return arr
@@ -41,7 +67,7 @@ func reflectToMap(fv reflect.Value) any {
arrlen := fv.Len()
arr := make([]any, arrlen)
for i := 0; i < arrlen; i++ {
arr[i] = reflectToMap(fv.Index(i))
arr[i] = reflectToMap(fv.Index(i), depth+1, opt)
}
return arr
@@ -56,11 +82,15 @@ func reflectToMap(fv reflect.Value) any {
if fv.Kind() == reflect.Struct {
if opt.KeepJsonMarshalTypes && fv.Type().Implements(reflect.TypeFor[json.Marshaler]()) {
return fv.Interface()
}
res := make(map[string]any)
for i := 0; i < fv.NumField(); i++ {
if fv.Type().Field(i).IsExported() {
res[fv.Type().Field(i).Name] = reflectToMap(fv.Field(i))
res[fv.Type().Field(i).Name] = reflectToMap(fv.Field(i), depth+1, opt)
}
}

View File

@@ -0,0 +1,42 @@
package reflectext
import (
"fmt"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"testing"
"time"
)
func TestConvertStructToMap(t *testing.T) {
type tst struct {
FieldA int
FieldB string
FieldC time.Time
FieldD []float64
FieldE1 *int
FieldE2 **int
FieldE3 *int
FieldE4 **int
FieldE5 *int
FieldE6 **int
}
value := tst{
FieldA: 123,
FieldB: "hello",
FieldC: time.Date(2020, 05, 12, 8, 30, 0, 0, time.UTC),
FieldD: []float64{1, 2, 3, 4, 5, 6, 7},
FieldE1: nil,
FieldE2: nil,
FieldE3: langext.Ptr(12),
FieldE4: langext.DblPtr(12),
FieldE5: nil,
FieldE6: langext.DblPtrNil[int](),
}
valueOut := ConvertStructToMap(value, ConvertStructToMapOpt{KeepJsonMarshalTypes: true})
fmt.Printf("%+v\n", valueOut)
}
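
The MaxDepth option is not exercised by the test above; a small sketch of its effect (the structs are invented), assuming the package paths from this module:

package main

import (
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "gogs.mikescher.com/BlackForestBytes/goext/reflectext"
)

type Inner struct{ Value int }

type Outer struct {
    Name  string
    Inner Inner
}

func main() {
    v := Outer{Name: "x", Inner: Inner{Value: 7}}

    // With MaxDepth=1 the top-level struct is converted, but nested structs are returned as-is.
    m := reflectext.ConvertStructToMap(v, reflectext.ConvertStructToMapOpt{MaxDepth: langext.Ptr(1)})

    fmt.Printf("%T\n", m["Inner"]) // main.Inner (not map[string]any)
}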

View File

@@ -47,9 +47,3 @@ func TestGetMapField(t *testing.T) {
tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test")), "12 true")
tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test2")), "0 false")
}
func main2() {
}
func main() {
}

185
reflectext/structAccess.go Normal file
View File

@@ -0,0 +1,185 @@
package reflectext
import (
"errors"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"strconv"
"strings"
)
var ErrAccessStructInvalidFieldType = errors.New("invalid field type")
var ErrAccessStructFieldInPathWasNil = errors.New("a field in the path was nil")
var ErrAccessStructInvalidArrayIndex = errors.New("invalid array index")
var ErrAccessStructInvalidMapKey = errors.New("invalid map key")
var ErrAccessStructArrayAccess = errors.New("trying to access array")
var ErrAccessStructMapAccess = errors.New("trying to access map")
var ErrAccessStructMissingField = errors.New("missing field")
type AccessStructOpt struct {
ReturnNilOnMissingFields bool // return nil (instead of error) when a field in the path is missing (aka the supplied path is wrong)
ReturnNilOnNilPtrFields bool // return nil (instead of error) when a field in the path is nil
ReturnNilOnWrongFinalFieldType bool // return nil (instead of error) when the (final) field is not of the requested generic type
ReturnNilOnWrongIntermedFieldType bool // return nil (instead of error) when the intermediate field has an invalid type
ReturnNilOnInvalidArrayIndizes bool // return nil (instead of error) when trying to access an array with an invalid index (not a number or out of range)
ReturnNilOnMissingMapKeys bool // return nil (instead of error) when trying to access a map with a missing key
UsedTagForKeys *string // Use this tag for key names in the struct (instead of the StructField.Name)
PreventArrayAccess bool // do not access array indices - throw an error instead
PreventMapAccess bool // do not access maps - throw an error instead
}
func AccessJSONStruct[TResult any](v any, path string) (TResult, error) {
return AccessStructByStringPath[TResult](v, path, AccessStructOpt{UsedTagForKeys: langext.Ptr("json")})
}
func AccessStruct[TResult any](v any, path string) (TResult, error) {
return AccessStructByStringPath[TResult](v, path, AccessStructOpt{})
}
func AccessStructByArrayPath[TResult any](v any, path []string, opts ...AccessStructOpt) (TResult, error) {
opt := AccessStructOpt{}
if len(opts) > 0 {
opt = opts[0]
}
resultVal, err := accessStructByPath(reflect.ValueOf(v), path, opt)
if err != nil {
return *new(TResult), err
}
if resultValCast, ok := resultVal.(TResult); ok {
return resultValCast, nil
} else if opt.ReturnNilOnWrongFinalFieldType {
return *new(TResult), nil
} else {
return *new(TResult), ErrAccessStructInvalidFieldType
}
}
func AccessStructByStringPath[TResult any](v any, path string, opts ...AccessStructOpt) (TResult, error) {
opt := AccessStructOpt{}
if len(opts) > 0 {
opt = opts[0]
}
arrpath := strings.Split(path, ".")
resultVal, err := accessStructByPath(reflect.ValueOf(v), arrpath, opt)
if err != nil {
return *new(TResult), err
}
if resultValCast, ok := resultVal.(TResult); ok {
return resultValCast, nil
} else if opt.ReturnNilOnWrongFinalFieldType {
return *new(TResult), nil
} else {
return *new(TResult), ErrAccessStructInvalidFieldType
}
}
func accessStructByPath(val reflect.Value, path []string, opt AccessStructOpt) (any, error) {
if len(path) == 0 {
return val.Interface(), nil
}
currPath := path[0]
if val.Kind() == reflect.Ptr {
if val.IsNil() {
if opt.ReturnNilOnNilPtrFields {
return nil, nil
} else {
return nil, ErrAccessStructFieldInPathWasNil
}
}
return accessStructByPath(val.Elem(), path, opt)
}
if val.Kind() == reflect.Array || val.Kind() == reflect.Slice {
if opt.PreventArrayAccess {
return nil, ErrAccessStructArrayAccess
}
if val.IsNil() {
if opt.ReturnNilOnNilPtrFields {
return nil, nil
} else {
return nil, ErrAccessStructFieldInPathWasNil
}
}
arrIdx, err := strconv.ParseInt(currPath, 10, 64)
if err != nil {
if opt.ReturnNilOnInvalidArrayIndizes {
return nil, nil
} else {
return nil, ErrAccessStructInvalidArrayIndex
}
}
if arrIdx < 0 || int(arrIdx) >= val.Len() {
if opt.ReturnNilOnInvalidArrayIndizes {
return nil, nil
} else {
return nil, ErrAccessStructInvalidArrayIndex
}
}
return accessStructByPath(val.Index(int(arrIdx)), path[1:], opt)
}
if val.Kind() == reflect.Map {
if opt.PreventMapAccess {
return nil, ErrAccessStructMapAccess
}
if val.IsNil() {
if opt.ReturnNilOnNilPtrFields {
return nil, nil
} else {
return nil, ErrAccessStructFieldInPathWasNil
}
}
mapval := val.MapIndex(reflect.ValueOf(currPath))
if !mapval.IsValid() || mapval.IsZero() {
if opt.ReturnNilOnMissingMapKeys {
return nil, nil
} else {
return nil, ErrAccessStructInvalidMapKey
}
}
return accessStructByPath(mapval, path[1:], opt)
}
if val.Kind() == reflect.Struct {
if opt.UsedTagForKeys != nil {
for i := 0; i < val.NumField(); i++ {
if val.Type().Field(i).Tag.Get(*opt.UsedTagForKeys) == currPath {
return accessStructByPath(val.Field(i), path[1:], opt)
}
}
if opt.ReturnNilOnMissingFields {
return nil, nil
} else {
return nil, ErrAccessStructMissingField
}
} else {
for i := 0; i < val.NumField(); i++ {
if val.Type().Field(i).Name == currPath {
return accessStructByPath(val.Field(i), path[1:], opt)
}
}
if opt.ReturnNilOnMissingFields {
return nil, nil
} else {
return nil, ErrAccessStructMissingField
}
}
}
if opt.ReturnNilOnWrongIntermedFieldType {
return nil, nil
} else {
return nil, ErrAccessStructMissingField
}
}

View File

@@ -0,0 +1,259 @@
package reflectext
import "testing"
type TestStruct struct {
Name string `json:"name"`
Age int `json:"age"`
}
func TestAccessStructByArrayPath_HappyPath(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
result, err := AccessStructByArrayPath[string](testStruct, []string{"Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "John" {
t.Errorf("Expected 'John', got '%s'", result)
}
}
func TestAccessStructByArrayPath_InvalidField(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[string](testStruct, []string{"Invalid"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
func TestAccessStructByStringPath_HappyPath(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
result, err := AccessStructByStringPath[string](testStruct, "Name")
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "John" {
t.Errorf("Expected 'John', got '%s'", result)
}
}
func TestAccessStructByStringPath_InvalidField(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByStringPath[string](testStruct, "Invalid")
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type RecursiveStruct struct {
Name string
Sub *RecursiveStruct
SubSlice []RecursiveStruct
}
func TestAccessStructByArrayPath_RecursiveStruct(t *testing.T) {
testStruct := RecursiveStruct{Name: "John", Sub: &RecursiveStruct{Name: "Jane"}}
result, err := AccessStructByArrayPath[string](*testStruct.Sub, []string{"Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
func TestAccessStructByArrayPath_RecursiveStructSlice(t *testing.T) {
testStruct := RecursiveStruct{Name: "John", SubSlice: []RecursiveStruct{{Name: "Jane"}}}
result, err := AccessStructByArrayPath[string](testStruct.SubSlice[0], []string{"Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
func TestAccessStructByArrayPath_WrongType(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[int](testStruct, []string{"Name"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
func TestAccessStructByArrayPath_InvalidPath(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[string](testStruct, []string{"Name", "Invalid"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type NestedStruct struct {
Name string
Sub *TestStruct
}
func TestAccessStructByStringPath_NestedStruct(t *testing.T) {
testStruct := NestedStruct{Name: "John", Sub: &TestStruct{Name: "Jane", Age: 30}}
result, err := AccessStructByStringPath[string](testStruct, "Sub.Name")
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
type DeepNestedStruct struct {
Name string
Sub *NestedStruct
}
func TestAccessStructByStringPath_DeepNestedStruct(t *testing.T) {
testStruct := DeepNestedStruct{Name: "John", Sub: &NestedStruct{Name: "Jane", Sub: &TestStruct{Name: "Doe", Age: 30}}}
result, err := AccessStructByStringPath[string](testStruct, "Sub.Sub.Name")
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Doe" {
t.Errorf("Expected 'Doe', got '%s'", result)
}
}
type MapStruct struct {
Name string
Age int
}
type TestStructWithMap struct {
MapField map[string]MapStruct
}
func TestAccessStructByArrayPath_MapField(t *testing.T) {
testStruct := TestStructWithMap{
MapField: map[string]MapStruct{
"key": {Name: "John", Age: 30},
},
}
result, err := AccessStructByArrayPath[string](testStruct, []string{"MapField", "key", "Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "John" {
t.Errorf("Expected 'John', got '%s'", result)
}
}
func TestAccessStructByArrayPath_InvalidMapKey(t *testing.T) {
testStruct := TestStructWithMap{
MapField: map[string]MapStruct{
"key": {Name: "John", Age: 30},
},
}
_, err := AccessStructByArrayPath[string](testStruct, []string{"MapField", "invalid", "Name"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type ArrayStruct struct {
Name string
Arr []TestStruct
}
func TestAccessStructByArrayPath_ArrayField(t *testing.T) {
testStruct := ArrayStruct{
Name: "John",
Arr: []TestStruct{{Name: "Jane", Age: 30}},
}
result, err := AccessStructByArrayPath[string](testStruct, []string{"Arr", "0", "Name"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
func TestAccessStructByArrayPath_InvalidArrayIndex(t *testing.T) {
testStruct := ArrayStruct{
Name: "John",
Arr: []TestStruct{{Name: "Jane", Age: 30}},
}
_, err := AccessStructByArrayPath[string](testStruct, []string{"Arr", "1", "Name"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type FunctionStruct struct {
Name string
Func func() string
}
func TestAccessStructByArrayPath_FunctionField(t *testing.T) {
testStruct := FunctionStruct{Name: "John", Func: func() string { return "Hello" }}
_, err := AccessStructByArrayPath[string](testStruct, []string{"Func"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
func TestAccessStructByArrayPath_NonExistentPath(t *testing.T) {
testStruct := TestStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[string](testStruct, []string{"NonExistent"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type NestedStructWithTag struct {
Name string `json:"name"`
Sub *TestStruct `json:"sub"`
}
func TestAccessStructByArrayPath_UsedTagForKeys(t *testing.T) {
testStruct := NestedStructWithTag{Name: "John", Sub: &TestStruct{Name: "Jane", Age: 30}}
tag := "json"
result, err := AccessStructByArrayPath[string](testStruct, []string{"sub", "name"}, AccessStructOpt{UsedTagForKeys: &tag})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != "Jane" {
t.Errorf("Expected 'Jane', got '%s'", result)
}
}
func TestAccessStructByArrayPath_UsedTagForKeysInvalid(t *testing.T) {
testStruct := NestedStructWithTag{Name: "John", Sub: &TestStruct{Name: "Jane", Age: 30}}
tag := "json"
_, err := AccessStructByArrayPath[string](testStruct, []string{"sub", "invalid"}, AccessStructOpt{UsedTagForKeys: &tag})
if err == nil {
t.Errorf("Expected error, got nil")
}
}
type DifferentTypeStruct struct {
Name string
Age int
}
func TestAccessStructByArrayPath_DifferentType(t *testing.T) {
testStruct := DifferentTypeStruct{Name: "John", Age: 30}
result, err := AccessStructByArrayPath[any](testStruct, []string{"Age"})
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != 30 {
t.Errorf("Expected '30', got '%v'", result)
}
}
func TestAccessStructByArrayPath_DifferentTypeInvalid(t *testing.T) {
testStruct := DifferentTypeStruct{Name: "John", Age: 30}
_, err := AccessStructByArrayPath[any](testStruct, []string{"Invalid"})
if err == nil {
t.Errorf("Expected error, got nil")
}
}

View File

@@ -9,6 +9,8 @@ import (
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"reflect"
"strconv"
"strings"
"time"
)
@@ -65,36 +67,20 @@ func (t *Date) UnmarshalJSON(data []byte) error {
if err := json.Unmarshal(data, &str); err != nil {
return err
}
t0, err := time.Parse(t.FormatStr(), str)
if err != nil {
return err
}
t.Year = t0.Year()
t.Month = int(t0.Month())
t.Day = t0.Day()
return nil
return t.ParseString(str)
}
func (t Date) MarshalJSON() ([]byte, error) {
str := t.TimeUTC().Format(t.FormatStr())
str := t.String()
return json.Marshal(str)
}
func (t Date) MarshalText() ([]byte, error) {
b := make([]byte, 0, len(t.FormatStr()))
return t.TimeUTC().AppendFormat(b, t.FormatStr()), nil
return []byte(t.String()), nil
}
func (t *Date) UnmarshalText(data []byte) error {
var err error
v, err := time.Parse(t.FormatStr(), string(data))
if err != nil {
return err
}
t.Year = v.Year()
t.Month = int(v.Month())
t.Day = v.Day()
return nil
return t.ParseString(string(data))
}
func (t *Date) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
@@ -116,6 +102,13 @@ func (t *Date) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
return err
}
if tt == "" {
t.Year = 0
t.Month = 0
t.Day = 0
return nil
}
v, err := time.Parse(t.FormatStr(), tt)
if err != nil {
return err
@@ -128,7 +121,10 @@ func (t *Date) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
}
func (t Date) MarshalBSONValue() (bsontype.Type, []byte, error) {
return bson.MarshalValue(t.TimeUTC().Format(t.FormatStr()))
if t.IsZero() {
return bson.MarshalValue("")
}
return bson.MarshalValue(t.String())
}
func (t Date) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
@@ -164,7 +160,7 @@ func (t Date) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val
}
func (t Date) Serialize() string {
return t.TimeUTC().Format(t.FormatStr())
return t.String()
}
func (t Date) FormatStr() string {
@@ -212,11 +208,52 @@ func (t Date) Format(layout string) string {
}
func (t Date) GoString() string {
return t.TimeUTC().GoString()
return fmt.Sprintf("rfctime.Date{Year: %d, Month: %d, Day: %d}", t.Year, t.Month, t.Day)
}
func (t Date) String() string {
return t.TimeUTC().String()
return fmt.Sprintf("%04d-%02d-%02d", t.Year, t.Month, t.Day)
}
func (t *Date) ParseString(v string) error {
split := strings.Split(v, "-")
if len(split) != 3 {
return errors.New("invalid date format: " + v)
}
year, err := strconv.ParseInt(split[0], 10, 32)
if err != nil {
return errors.New("invalid date format: " + v + ": " + err.Error())
}
month, err := strconv.ParseInt(split[1], 10, 32)
if err != nil {
return errors.New("invalid date format: " + v + ": " + err.Error())
}
day, err := strconv.ParseInt(split[2], 10, 32)
if err != nil {
return errors.New("invalid date format: " + v + ": " + err.Error())
}
if year < 0 {
return errors.New("invalid date format: " + v + ": year is negative")
}
if month < 1 || month > 12 {
return errors.New("invalid date format: " + v + ": month is out of range")
}
if day < 1 || day > 31 {
return errors.New("invalid date format: " + v + ": day is out of range")
}
t.Year = int(year)
t.Month = int(month)
t.Day = int(day)
return nil
}
func (t Date) IsZero() bool {
return t.Year == 0 && t.Month == 0 && t.Day == 0
}
func NewDate(t time.Time) Date {
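
A minimal illustrative sketch of the new ParseString/String/IsZero trio shown in this hunk (the small program itself is not taken from the changeset; only the rfctime package path and methods are):

package main

import (
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/rfctime"
)

func main() {
    var d rfctime.Date
    if err := d.ParseString("2024-10-05"); err != nil { // accepts "YYYY-MM-DD" and checks basic ranges
        panic(err)
    }
    fmt.Println(d.String()) // "2024-10-05" - zero-padded, no time component
    fmt.Println(d.IsZero()) // false; only Date{0, 0, 0} counts as zero
}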

View File

@@ -8,6 +8,7 @@ import (
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"time"
)
@@ -245,6 +246,13 @@ func NewRFC3339(t time.Time) RFC3339Time {
return RFC3339Time(t)
}
func NewRFC3339Ptr(t *time.Time) *RFC3339Time {
if t == nil {
return nil
}
return langext.Ptr(RFC3339Time(*t))
}
func NowRFC3339() RFC3339Time {
return RFC3339Time(time.Now())
}

View File

@@ -8,6 +8,7 @@ import (
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"time"
)
@@ -245,6 +246,13 @@ func NewRFC3339Nano(t time.Time) RFC3339NanoTime {
return RFC3339NanoTime(t)
}
func NewRFC3339NanoPtr(t *time.Time) *RFC3339NanoTime {
if t == nil {
return nil
}
return langext.Ptr(RFC3339NanoTime(*t))
}
func NowRFC3339Nano() RFC3339NanoTime {
return RFC3339NanoTime(time.Now())
}

View File

@@ -8,6 +8,7 @@ import (
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"strconv"
"time"
@@ -239,6 +240,13 @@ func NewUnix(t time.Time) UnixTime {
return UnixTime(t)
}
func NewUnixPtr(t *time.Time) *UnixTime {
if t == nil {
return nil
}
return langext.Ptr(UnixTime(*t))
}
func NowUnix() UnixTime {
return UnixTime(time.Now())
}

View File

@@ -8,6 +8,7 @@ import (
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"strconv"
"time"
@@ -239,6 +240,13 @@ func NewUnixMilli(t time.Time) UnixMilliTime {
return UnixMilliTime(t)
}
func NewUnixMilliPtr(t *time.Time) *UnixMilliTime {
if t == nil {
return nil
}
return langext.Ptr(UnixMilliTime(*t))
}
func NowUnixMilli() UnixMilliTime {
return UnixMilliTime(time.Now())
}

View File

@@ -8,6 +8,7 @@ import (
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"strconv"
"time"
@@ -239,6 +240,13 @@ func NewUnixNano(t time.Time) UnixNanoTime {
return UnixNanoTime(t)
}
func NewUnixNanoPtr(t *time.Time) *UnixNanoTime {
if t == nil {
return nil
}
return langext.Ptr(UnixNanoTime(*t))
}
func NowUnixNano() UnixNanoTime {
return UnixNanoTime(time.Now())
}
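
The five *Ptr constructors added in the rfctime files above all follow the same nil-in/nil-out pattern; a short illustrative sketch (the program is not part of the changeset):

package main

import (
    "fmt"
    "time"

    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "gogs.mikescher.com/BlackForestBytes/goext/rfctime"
)

func main() {
    var unset *time.Time
    fmt.Println(rfctime.NewUnixMilliPtr(unset) == nil) // true: nil input stays nil

    now := langext.Ptr(time.Now())
    p := rfctime.NewRFC3339Ptr(now) // non-nil input is wrapped via langext.Ptr
    fmt.Println(p != nil)           // true
}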

View File

@@ -52,8 +52,7 @@ func TestCreateUpdateStatement(t *testing.T) {
xdb := tst.Must(sqlx.Open("sqlite", url))(t)
db := NewDB(xdb)
db.RegisterDefaultConverter()
db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue})
_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{})
tst.AssertNoErr(t, err)

sq/commentTrimmer.go (new file, 32 lines)

View File

@@ -0,0 +1,32 @@
package sq
import (
"context"
"strings"
)
var CommentTrimmer = NewPreListener(fnTrimComments)
func fnTrimComments(ctx context.Context, cmdtype string, id *uint16, sql *string, params *PP) error {
res := make([]string, 0)
for _, s := range strings.Split(*sql, "\n") {
if strings.HasPrefix(strings.TrimSpace(s), "--") {
continue
}
idx := strings.Index(s, "--")
if idx != -1 {
s = s[:idx]
}
s = strings.TrimRight(s, " \t\r\n")
res = append(res, s)
}
*sql = strings.Join(res, "\n")
return nil
}
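
CommentTrimmer is registered automatically by NewDB unless DBOptions.RegisterCommentTrimmer is set to false (see the database.go hunk further down). The sketch below simply calls its PreQuery hook directly to show the rewrite; it is illustrative and not part of the changeset:

package main

import (
    "context"
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/sq"
)

func main() {
    stmt := "SELECT * FROM requests -- trailing comment\n-- full-line comment\nWHERE id = :id"
    params := sq.PP{}

    // full-line "--" comments are dropped, trailing ones are cut off in place
    err := sq.CommentTrimmer.PreQuery(context.Background(), nil, &stmt, &params, sq.PreQueryMeta{})
    if err != nil {
        panic(err)
    }

    fmt.Println(stmt)
    // SELECT * FROM requests
    // WHERE id = :id
}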

View File

@@ -1,17 +1,10 @@
package sq
import (
"encoding/json"
"errors"
"fmt"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rfctime"
"gogs.mikescher.com/BlackForestBytes/goext/timeext"
"reflect"
"strconv"
"strings"
"time"
)
type DBTypeConverter interface {
@@ -21,169 +14,16 @@ type DBTypeConverter interface {
DBToModel(v any) (any, error)
}
var ConverterBoolToBit = NewDBTypeConverter[bool, int64](func(v bool) (int64, error) {
return langext.Conditional(v, int64(1), int64(0)), nil
}, func(v int64) (bool, error) {
if v == 0 {
return false, nil
}
if v == 1 {
return true, nil
}
return false, errors.New(fmt.Sprintf("invalid value for boolean: '%d'", v))
})
type DBDataConstraint interface {
string | langext.NumberConstraint | []byte
}
var ConverterTimeToUnixMillis = NewDBTypeConverter[time.Time, int64](func(v time.Time) (int64, error) {
return v.UnixMilli(), nil
}, func(v int64) (time.Time, error) {
return time.UnixMilli(v), nil
})
type DatabaseConvertible[TModelData any, TDBData DBDataConstraint] interface {
MarshalToDB(v TModelData) (TDBData, error)
UnmarshalToModel(v TDBData) (TModelData, error)
}
var ConverterRFCUnixMilliTimeToUnixMillis = NewDBTypeConverter[rfctime.UnixMilliTime, int64](func(v rfctime.UnixMilliTime) (int64, error) {
return v.UnixMilli(), nil
}, func(v int64) (rfctime.UnixMilliTime, error) {
return rfctime.NewUnixMilli(time.UnixMilli(v)), nil
})
var ConverterRFCUnixNanoTimeToUnixNanos = NewDBTypeConverter[rfctime.UnixNanoTime, int64](func(v rfctime.UnixNanoTime) (int64, error) {
return v.UnixNano(), nil
}, func(v int64) (rfctime.UnixNanoTime, error) {
return rfctime.NewUnixNano(time.Unix(0, v)), nil
})
var ConverterRFCUnixTimeToUnixSeconds = NewDBTypeConverter[rfctime.UnixTime, int64](func(v rfctime.UnixTime) (int64, error) {
return v.Unix(), nil
}, func(v int64) (rfctime.UnixTime, error) {
return rfctime.NewUnix(time.Unix(v, 0)), nil
})
// ConverterRFC339TimeToString
// Does not really use RFC3339 - but SQLite does not understand timezones or the `T` delimiter
var ConverterRFC339TimeToString = NewDBTypeConverter[rfctime.RFC3339Time, string](func(v rfctime.RFC3339Time) (string, error) {
return v.Time().In(time.UTC).Format("2006-01-02 15:04:05"), nil
}, func(v string) (rfctime.RFC3339Time, error) {
t, err := time.Parse("2006-01-02 15:04:05", v)
if err != nil {
return rfctime.RFC3339Time{}, err
}
return rfctime.NewRFC3339(t), nil
})
// ConverterRFC339NanoTimeToString
// Does not really use RFC3339 - but SQLite does not understand timezones or the `T` delimiter
var ConverterRFC339NanoTimeToString = NewDBTypeConverter[rfctime.RFC3339NanoTime, string](func(v rfctime.RFC3339NanoTime) (string, error) {
return v.Time().In(time.UTC).Format("2006-01-02 15:04:05.999999999"), nil
}, func(v string) (rfctime.RFC3339NanoTime, error) {
t, err := time.ParseInLocation("2006-01-02 15:04:05.999999999", v, time.UTC)
if err != nil {
return rfctime.RFC3339NanoTime{}, err
}
return rfctime.NewRFC3339Nano(t), nil
})
var ConverterRFCDateToString = NewDBTypeConverter[rfctime.Date, string](func(v rfctime.Date) (string, error) {
return fmt.Sprintf("%04d-%02d-%02d", v.Year, v.Month, v.Day), nil
}, func(v string) (rfctime.Date, error) {
split := strings.Split(v, "-")
if len(split) != 3 {
return rfctime.Date{}, errors.New("invalid date format: " + v)
}
year, err := strconv.ParseInt(split[0], 10, 32)
if err != nil {
return rfctime.Date{}, errors.New("invalid date format: " + v + ": " + err.Error())
}
month, err := strconv.ParseInt(split[0], 10, 32)
if err != nil {
return rfctime.Date{}, errors.New("invalid date format: " + v + ": " + err.Error())
}
day, err := strconv.ParseInt(split[0], 10, 32)
if err != nil {
return rfctime.Date{}, errors.New("invalid date format: " + v + ": " + err.Error())
}
return rfctime.Date{Year: int(year), Month: int(month), Day: int(day)}, nil
})
var ConverterRFCTimeToString = NewDBTypeConverter[rfctime.Time, string](func(v rfctime.Time) (string, error) {
return v.SerializeShort(), nil
}, func(v string) (rfctime.Time, error) {
res := rfctime.Time{}
err := res.Deserialize(v)
if err != nil {
return rfctime.Time{}, err
}
return res, nil
})
var ConverterRFCSecondsF64ToString = NewDBTypeConverter[rfctime.SecondsF64, float64](func(v rfctime.SecondsF64) (float64, error) {
return v.Seconds(), nil
}, func(v float64) (rfctime.SecondsF64, error) {
return rfctime.NewSecondsF64(timeext.FromSeconds(v)), nil
})
var ConverterJsonObjToString = NewDBTypeConverter[JsonObj, string](func(v JsonObj) (string, error) {
mrsh, err := json.Marshal(v)
if err != nil {
return "", err
}
return string(mrsh), nil
}, func(v string) (JsonObj, error) {
var mrsh JsonObj
if err := json.Unmarshal([]byte(v), &mrsh); err != nil {
return JsonObj{}, err
}
return mrsh, nil
})
var ConverterJsonArrToString = NewDBTypeConverter[JsonArr, string](func(v JsonArr) (string, error) {
mrsh, err := json.Marshal(v)
if err != nil {
return "", err
}
return string(mrsh), nil
}, func(v string) (JsonArr, error) {
var mrsh JsonArr
if err := json.Unmarshal([]byte(v), &mrsh); err != nil {
return JsonArr{}, err
}
return mrsh, nil
})
var ConverterExErrCategoryToString = NewDBTypeConverter[exerr.ErrorCategory, string](func(v exerr.ErrorCategory) (string, error) {
return v.Category, nil
}, func(v string) (exerr.ErrorCategory, error) {
for _, cat := range exerr.AllCategories {
if cat.Category == v {
return cat, nil
}
}
return exerr.CatUser, errors.New("failed to convert '" + v + "' to exerr.ErrorCategory")
})
var ConverterExErrSeverityToString = NewDBTypeConverter[exerr.ErrorSeverity, string](func(v exerr.ErrorSeverity) (string, error) {
return v.Severity, nil
}, func(v string) (exerr.ErrorSeverity, error) {
for _, sev := range exerr.AllSeverities {
if sev.Severity == v {
return sev, nil
}
}
return exerr.SevErr, errors.New("failed to convert '" + v + "' to exerr.ErrorSeverity")
})
var ConverterExErrTypeToString = NewDBTypeConverter[exerr.ErrorType, string](func(v exerr.ErrorType) (string, error) {
return v.Key, nil
}, func(v string) (exerr.ErrorType, error) {
for _, etp := range exerr.ListRegisteredTypes() {
if etp.Key == v {
return etp, nil
}
}
return exerr.NewType(v, nil), nil
})
type dbTypeConverterImpl[TModelData any, TDBData any] struct {
type dbTypeConverterImpl[TModelData any, TDBData DBDataConstraint] struct {
dbTypeString string
modelTypeString string
todb func(v TModelData) (TDBData, error)
@@ -212,7 +52,7 @@ func (t *dbTypeConverterImpl[TModelData, TDBData]) DBToModel(v any) (any, error)
return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.dbTypeString, v))
}
func NewDBTypeConverter[TModelData any, TDBData any](todb func(v TModelData) (TDBData, error), tomodel func(v TDBData) (TModelData, error)) DBTypeConverter {
func NewDBTypeConverter[TModelData any, TDBData DBDataConstraint](todb func(v TModelData) (TDBData, error), tomodel func(v TDBData) (TModelData, error)) DBTypeConverter {
return &dbTypeConverterImpl[TModelData, TDBData]{
dbTypeString: fmt.Sprintf("%T", *new(TDBData)),
modelTypeString: fmt.Sprintf("%T", *new(TModelData)),
@@ -221,6 +61,15 @@ func NewDBTypeConverter[TModelData any, TDBData any](todb func(v TModelData) (TD
}
}
func NewAutoDBTypeConverter[TDBData DBDataConstraint, TModelData DatabaseConvertible[TModelData, TDBData]](obj TModelData) DBTypeConverter {
return &dbTypeConverterImpl[TModelData, TDBData]{
dbTypeString: fmt.Sprintf("%T", *new(TDBData)),
modelTypeString: fmt.Sprintf("%T", *new(TModelData)),
todb: obj.MarshalToDB,
tomodel: obj.UnmarshalToModel,
}
}
func convertValueToDB(q Queryable, value any) (any, error) {
modelTypeStr := fmt.Sprintf("%T", value)
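
NewAutoDBTypeConverter above derives both directions of a converter from a DatabaseConvertible implementation. A hedged sketch of how a caller might use it - the Tags type and its comma-separated storage format are invented for illustration, and the call shape mirrors ConverterJsonObjToString in converterDefault.go below:

package mymodels

import (
    "strings"

    "gogs.mikescher.com/BlackForestBytes/goext/sq"
)

// Tags is a hypothetical model type stored as one comma-separated TEXT column.
type Tags []string

func (t Tags) MarshalToDB(v Tags) (string, error) {
    return strings.Join(v, ","), nil
}

func (t Tags) UnmarshalToModel(v string) (Tags, error) {
    if v == "" {
        return Tags{}, nil
    }
    return Tags(strings.Split(v, ",")), nil
}

// RegisterTagsConverter wires the type up once per DB handle; the type
// arguments are inferred from the DatabaseConvertible method set.
func RegisterTagsConverter(db sq.DB) {
    db.RegisterConverter(sq.NewAutoDBTypeConverter(Tags{}))
}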

sq/converterDefault.go (new file, 147 lines)
View File

@@ -0,0 +1,147 @@
package sq
import (
"errors"
"fmt"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rfctime"
"gogs.mikescher.com/BlackForestBytes/goext/timeext"
"time"
)
// ========================== COMMON DATATYPES ==========================
var ConverterBoolToBit = NewDBTypeConverter[bool, int64](func(v bool) (int64, error) {
return langext.Conditional(v, int64(1), int64(0)), nil
}, func(v int64) (bool, error) {
if v == 0 {
return false, nil
}
if v == 1 {
return true, nil
}
return false, errors.New(fmt.Sprintf("invalid value for boolean: '%d'", v))
})
var ConverterTimeToUnixMillis = NewDBTypeConverter[time.Time, int64](func(v time.Time) (int64, error) {
return v.UnixMilli(), nil
}, func(v int64) (time.Time, error) {
return time.UnixMilli(v), nil
})
// ========================== RFCTIME ==========================
var ConverterRFCUnixMilliTimeToUnixMillis = NewDBTypeConverter[rfctime.UnixMilliTime, int64](func(v rfctime.UnixMilliTime) (int64, error) {
return v.UnixMilli(), nil
}, func(v int64) (rfctime.UnixMilliTime, error) {
return rfctime.NewUnixMilli(time.UnixMilli(v)), nil
})
var ConverterRFCUnixNanoTimeToUnixNanos = NewDBTypeConverter[rfctime.UnixNanoTime, int64](func(v rfctime.UnixNanoTime) (int64, error) {
return v.UnixNano(), nil
}, func(v int64) (rfctime.UnixNanoTime, error) {
return rfctime.NewUnixNano(time.Unix(0, v)), nil
})
var ConverterRFCUnixTimeToUnixSeconds = NewDBTypeConverter[rfctime.UnixTime, int64](func(v rfctime.UnixTime) (int64, error) {
return v.Unix(), nil
}, func(v int64) (rfctime.UnixTime, error) {
return rfctime.NewUnix(time.Unix(v, 0)), nil
})
// ConverterRFC339TimeToString
// Does not really use RFC3339 - but SQLite does not understand timezones or the `T` delimiter
var ConverterRFC339TimeToString = NewDBTypeConverter[rfctime.RFC3339Time, string](func(v rfctime.RFC3339Time) (string, error) {
return v.Time().In(time.UTC).Format("2006-01-02 15:04:05"), nil
}, func(v string) (rfctime.RFC3339Time, error) {
t, err := time.Parse("2006-01-02 15:04:05", v)
if err != nil {
return rfctime.RFC3339Time{}, err
}
return rfctime.NewRFC3339(t), nil
})
// ConverterRFC339NanoTimeToString
// Does not really use RFC3339 - but SQLite does not understand timezones or the `T` delimiter
var ConverterRFC339NanoTimeToString = NewDBTypeConverter[rfctime.RFC3339NanoTime, string](func(v rfctime.RFC3339NanoTime) (string, error) {
return v.Time().In(time.UTC).Format("2006-01-02 15:04:05.999999999"), nil
}, func(v string) (rfctime.RFC3339NanoTime, error) {
t, err := time.ParseInLocation("2006-01-02 15:04:05.999999999", v, time.UTC)
if err != nil {
return rfctime.RFC3339NanoTime{}, err
}
return rfctime.NewRFC3339Nano(t), nil
})
var ConverterRFCDateToString = NewDBTypeConverter[rfctime.Date, string](func(v rfctime.Date) (string, error) {
return fmt.Sprintf("%04d-%02d-%02d", v.Year, v.Month, v.Day), nil
}, func(v string) (rfctime.Date, error) {
d := rfctime.Date{}
if err := d.ParseString(v); err != nil {
return rfctime.Date{}, err
} else {
return d, nil
}
})
var ConverterRFCTimeToString = NewDBTypeConverter[rfctime.Time, string](func(v rfctime.Time) (string, error) {
return v.SerializeShort(), nil
}, func(v string) (rfctime.Time, error) {
res := rfctime.Time{}
err := res.Deserialize(v)
if err != nil {
return rfctime.Time{}, err
}
return res, nil
})
var ConverterRFCSecondsF64ToString = NewDBTypeConverter[rfctime.SecondsF64, float64](func(v rfctime.SecondsF64) (float64, error) {
return v.Seconds(), nil
}, func(v float64) (rfctime.SecondsF64, error) {
return rfctime.NewSecondsF64(timeext.FromSeconds(v)), nil
})
// ========================== JSON ==========================
var ConverterJsonObjToString = NewAutoDBTypeConverter(JsonObj{})
var ConverterJsonArrToString = NewAutoDBTypeConverter(JsonArr{})
// Json[T] must be registered manually for each gen-type
// ========================== EXERR ==========================
var ConverterExErrCategoryToString = NewDBTypeConverter[exerr.ErrorCategory, string](func(v exerr.ErrorCategory) (string, error) {
return v.Category, nil
}, func(v string) (exerr.ErrorCategory, error) {
for _, cat := range exerr.AllCategories {
if cat.Category == v {
return cat, nil
}
}
return exerr.CatUser, errors.New("failed to convert '" + v + "' to exerr.ErrorCategory")
})
var ConverterExErrSeverityToString = NewDBTypeConverter[exerr.ErrorSeverity, string](func(v exerr.ErrorSeverity) (string, error) {
return v.Severity, nil
}, func(v string) (exerr.ErrorSeverity, error) {
for _, sev := range exerr.AllSeverities {
if sev.Severity == v {
return sev, nil
}
}
return exerr.SevErr, errors.New("failed to convert '" + v + "' to exerr.ErrorSeverity")
})
var ConverterExErrTypeToString = NewDBTypeConverter[exerr.ErrorType, string](func(v exerr.ErrorType) (string, error) {
return v.Key, nil
}, func(v string) (exerr.ErrorType, error) {
for _, etp := range exerr.ListRegisteredTypes() {
if etp.Key == v {
return etp, nil
}
}
return exerr.NewType(v, nil), nil
})
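
All of the converters in this new file are registered automatically when DBOptions.RegisterDefaultConverter is left at its default (see NewDB below). Each one is an ordinary DBTypeConverter, so its DB-to-model direction can also be exercised directly; a tiny illustrative sketch, not part of the changeset:

package main

import (
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/sq"
)

func main() {
    // int64(1) in the DB column maps back to true in the model
    v, err := sq.ConverterBoolToBit.DBToModel(int64(1))
    fmt.Println(v, err) // true <nil>

    // anything other than 0/1 is rejected with an error
    _, err = sq.ConverterBoolToBit.DBToModel(int64(2))
    fmt.Println(err != nil) // true
}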

View File

@@ -7,6 +7,7 @@ import (
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"sync"
"time"
)
type DB interface {
@@ -17,7 +18,11 @@ type DB interface {
AddListener(listener Listener)
Exit() error
RegisterConverter(DBTypeConverter)
RegisterDefaultConverter()
}
type DBOptions struct {
RegisterDefaultConverter *bool
RegisterCommentTrimmer *bool
}
type database struct {
@@ -28,13 +33,23 @@ type database struct {
conv []DBTypeConverter
}
func NewDB(db *sqlx.DB) DB {
return &database{
func NewDB(db *sqlx.DB, opt DBOptions) DB {
sqdb := &database{
db: db,
txctr: 0,
lock: sync.Mutex{},
lstr: make([]Listener, 0),
}
if langext.Coalesce(opt.RegisterDefaultConverter, true) {
sqdb.registerDefaultConverter()
}
if langext.Coalesce(opt.RegisterCommentTrimmer, true) {
sqdb.AddListener(CommentTrimmer)
}
return sqdb
}
func (db *database) AddListener(listener Listener) {
@@ -43,89 +58,121 @@ func (db *database) AddListener(listener Listener) {
func (db *database) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Result, error) {
origsql := sqlstr
t0 := time.Now()
preMeta := PreExecMeta{Context: ctx, TransactionConstructorContext: nil}
for _, v := range db.lstr {
err := v.PreExec(ctx, nil, &sqlstr, &prep)
err := v.PreExec(ctx, nil, &sqlstr, &prep, preMeta)
if err != nil {
return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
}
t1 := time.Now()
res, err := db.db.NamedExecContext(ctx, sqlstr, prep)
postMeta := PostExecMeta{Context: ctx, TransactionConstructorContext: nil, Init: t0, Start: t1, End: time.Now()}
for _, v := range db.lstr {
v.PostExec(nil, origsql, sqlstr, prep)
v.PostExec(nil, origsql, sqlstr, prep, err, postMeta)
}
if err != nil {
return nil, exerr.Wrap(err, "Failed to [exec] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
return res, nil
}
func (db *database) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Rows, error) {
origsql := sqlstr
t0 := time.Now()
preMeta := PreQueryMeta{Context: ctx, TransactionConstructorContext: nil}
for _, v := range db.lstr {
err := v.PreQuery(ctx, nil, &sqlstr, &prep)
err := v.PreQuery(ctx, nil, &sqlstr, &prep, preMeta)
if err != nil {
return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
}
t1 := time.Now()
rows, err := sqlx.NamedQueryContext(ctx, db.db, sqlstr, prep)
postMeta := PostQueryMeta{Context: ctx, TransactionConstructorContext: nil, Init: t0, Start: t1, End: time.Now()}
for _, v := range db.lstr {
v.PostQuery(nil, origsql, sqlstr, prep)
v.PostQuery(nil, origsql, sqlstr, prep, err, postMeta)
}
if err != nil {
return nil, exerr.Wrap(err, "Failed to [query] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
return rows, nil
}
func (db *database) Ping(ctx context.Context) error {
t0 := time.Now()
preMeta := PrePingMeta{Context: ctx}
for _, v := range db.lstr {
err := v.PrePing(ctx)
err := v.PrePing(ctx, preMeta)
if err != nil {
return err
}
}
t1 := time.Now()
err := db.db.PingContext(ctx)
postMeta := PostPingMeta{Context: ctx, Init: t0, Start: t1, End: time.Now()}
for _, v := range db.lstr {
v.PostPing(err)
v.PostPing(err, postMeta)
}
if err != nil {
return exerr.Wrap(err, "Failed to [ping] sql database").Build()
}
return nil
}
func (db *database) BeginTransaction(ctx context.Context, iso sql.IsolationLevel) (Tx, error) {
t0 := time.Now()
db.lock.Lock()
txid := db.txctr
db.txctr += 1 // with overflow !
db.lock.Unlock()
preMeta := PreTxBeginMeta{Context: ctx}
for _, v := range db.lstr {
err := v.PreTxBegin(ctx, txid)
err := v.PreTxBegin(ctx, txid, preMeta)
if err != nil {
return nil, err
}
}
t1 := time.Now()
xtx, err := db.db.BeginTxx(ctx, &sql.TxOptions{Isolation: iso})
postMeta := PostTxBeginMeta{Context: ctx, Init: t0, Start: t1, End: time.Now()}
for _, v := range db.lstr {
v.PostTxBegin(txid, err, postMeta)
}
if err != nil {
return nil, exerr.Wrap(err, "Failed to start sql transaction").Build()
}
for _, v := range db.lstr {
v.PostTxBegin(txid, err)
}
return NewTransaction(xtx, txid, db), nil
return newTransaction(ctx, xtx, txid, db), nil
}
func (db *database) Exit() error {
@@ -141,7 +188,7 @@ func (db *database) RegisterConverter(conv DBTypeConverter) {
db.conv = append(db.conv, conv)
}
func (db *database) RegisterDefaultConverter() {
func (db *database) registerDefaultConverter() {
db.RegisterConverter(ConverterBoolToBit)
db.RegisterConverter(ConverterTimeToUnixMillis)
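
With the new signature an empty DBOptions keeps both defaults enabled via langext.Coalesce (default converters plus the CommentTrimmer). A hedged construction sketch - the sqlite driver choice and DSN are placeholders, not part of the changeset:

package main

import (
    "github.com/jmoiron/sqlx"
    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "gogs.mikescher.com/BlackForestBytes/goext/sq"

    _ "modernc.org/sqlite" // driver registration; assumed driver name "sqlite"
)

func main() {
    xdb, err := sqlx.Open("sqlite", "file:demo.db?mode=memory&cache=shared") // placeholder DSN
    if err != nil {
        panic(err)
    }

    // nil fields default to true; here converters stay on and the comment trimmer is opted out
    db := sq.NewDB(xdb, sq.DBOptions{
        RegisterDefaultConverter: langext.Ptr(true),
        RegisterCommentTrimmer:   langext.Ptr(false),
    })
    _ = db
}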

View File

@@ -47,3 +47,10 @@ func NewSimplePaginateFilter(filterClause string, filterParams PP, sort []Filter
},
}
}
func NewEmptyPaginateFilter() PaginateFilter {
return genericPaginateFilter{
sql: func(params PP) (string, string, []string) { return "1=1", "", nil },
sort: func() []FilterSort { return make([]FilterSort, 0) },
}
}

View File

@@ -31,7 +31,7 @@ func HashMattnSqliteSchema(ctx context.Context, schemaStr string) (string, error
return "", err
}
db := NewDB(xdb)
db := NewDB(xdb, DBOptions{})
_, err = db.Exec(ctx, schemaStr, PP{})
if err != nil {
@@ -59,7 +59,7 @@ func HashGoSqliteSchema(ctx context.Context, schemaStr string) (string, error) {
return "", err
}
db := NewDB(xdb)
db := NewDB(xdb, DBOptions{})
_, err = db.Exec(ctx, schemaStr, PP{})
if err != nil {

View File

@@ -1,5 +1,59 @@
package sq
import "encoding/json"
type JsonObj map[string]any
func (j JsonObj) MarshalToDB(v JsonObj) (string, error) {
mrsh, err := json.Marshal(v)
if err != nil {
return "", err
}
return string(mrsh), nil
}
func (j JsonObj) UnmarshalToModel(v string) (JsonObj, error) {
var mrsh JsonObj
if err := json.Unmarshal([]byte(v), &mrsh); err != nil {
return JsonObj{}, err
}
return mrsh, nil
}
type JsonArr []any
func (j JsonArr) MarshalToDB(v JsonArr) (string, error) {
mrsh, err := json.Marshal(v)
if err != nil {
return "", err
}
return string(mrsh), nil
}
func (j JsonArr) UnmarshalToModel(v string) (JsonArr, error) {
var mrsh JsonArr
if err := json.Unmarshal([]byte(v), &mrsh); err != nil {
return JsonArr{}, err
}
return mrsh, nil
}
type AutoJson[T any] struct {
Value T
}
func (j AutoJson[T]) MarshalToDB(v AutoJson[T]) (string, error) {
mrsh, err := json.Marshal(v.Value)
if err != nil {
return "", err
}
return string(mrsh), nil
}
func (j AutoJson[T]) UnmarshalToModel(v string) (AutoJson[T], error) {
mrsh := *new(T)
if err := json.Unmarshal([]byte(v), &mrsh); err != nil {
return AutoJson[T]{}, err
}
return AutoJson[T]{Value: mrsh}, nil
}
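
The generic AutoJson[T] wrapper is presumably what the "Json[T] must be registered manually for each gen-type" note in converterDefault.go refers to: one converter per concrete T has to be registered by hand. A hedged sketch, with MyConfig invented purely for illustration:

package mymodels

import "gogs.mikescher.com/BlackForestBytes/goext/sq"

// MyConfig is a hypothetical struct stored as a JSON TEXT column.
type MyConfig struct {
    Retries int    `json:"retries"`
    BaseURL string `json:"baseUrl"`
}

// one registration per concrete type parameter
func RegisterConfigConverter(db sq.DB) {
    db.RegisterConverter(sq.NewAutoDBTypeConverter(sq.AutoJson[MyConfig]{}))
}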

sq/list.go (new file, 48 lines)
View File

@@ -0,0 +1,48 @@
package sq
import (
"context"
"fmt"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)
func Iterate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int, consumer func(ctx context.Context, v TData) error) (int, error) {
if filter == nil {
filter = NewEmptyPaginateFilter()
}
prepParams := PP{}
sortOrder := filter.Sort()
sortCond := ""
if len(sortOrder) > 0 {
sortCond = "ORDER BY "
for i, v := range sortOrder {
if i > 0 {
sortCond += ", "
}
sortCond += v.Field + " " + string(v.Direction)
}
}
pageCond := ""
if limit != nil {
pageCond += fmt.Sprintf("LIMIT :%s OFFSET :%s", prepParams.Add(*limit+1), prepParams.Add(*limit*(page-1)))
}
filterCond, joinCond, joinTables := filter.SQL(prepParams)
selectCond := table + ".*"
for _, v := range joinTables {
selectCond += ", " + v + ".*"
}
sqlQueryData := "SELECT " + selectCond + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " ) " + sortCond + " " + pageCond
rows, err := q.Query(ctx, sqlQueryData, prepParams)
if err != nil {
return 0, exerr.Wrap(err, "failed to list paginated entries from DB").Str("table", table).Any("filter", filter).Int("page", page).Any("limit", limit).Build()
}
return IterateAll[TData](ctx, q, rows, scanMode, scanSec, true, consumer)
}
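
A hedged usage sketch for the new Iterate helper, which streams rows into a callback instead of materialising a slice. The Request model, table name and db tags are assumptions for illustration; a nil filter falls back to NewEmptyPaginateFilter:

package mymodels

import (
    "context"

    "gogs.mikescher.com/BlackForestBytes/goext/langext"
    "gogs.mikescher.com/BlackForestBytes/goext/sq"
)

// Request is a hypothetical row model for a `requests` table.
type Request struct {
    ID        string `db:"id"`
    Timestamp int64  `db:"timestamp"`
}

func processAllRequests(ctx context.Context, q sq.Queryable) (int, error) {
    // page 1; the limit pointer is turned into the LIMIT/OFFSET clause built above
    return sq.Iterate[Request](ctx, q, "requests", nil, sq.SModeExtended, sq.Safe, 1, langext.Ptr(500),
        func(ctx context.Context, v Request) error {
            // called once per scanned row; returning an error aborts the iteration
            return nil
        })
}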

View File

@@ -1,19 +1,266 @@
package sq
import "context"
import (
"context"
"time"
)
type PrePingMeta struct {
Context context.Context
}
type PreTxBeginMeta struct {
Context context.Context
ConstructorContext context.Context
}
type PreTxCommitMeta struct {
ConstructorContext context.Context
}
type PreTxRollbackMeta struct {
ConstructorContext context.Context
}
type PreQueryMeta struct {
Context context.Context
TransactionConstructorContext context.Context
}
type PreExecMeta struct {
Context context.Context
TransactionConstructorContext context.Context
}
type PostPingMeta struct {
Context context.Context
Init time.Time
Start time.Time
End time.Time
}
type PostTxBeginMeta struct {
Context context.Context
Init time.Time
Start time.Time
End time.Time
}
type PostTxCommitMeta struct {
ConstructorContext context.Context
Init time.Time
Start time.Time
End time.Time
ExecCounter int
QueryCounter int
}
type PostTxRollbackMeta struct {
ConstructorContext context.Context
Init time.Time
Start time.Time
End time.Time
ExecCounter int
QueryCounter int
}
type PostQueryMeta struct {
Context context.Context
TransactionConstructorContext context.Context
Init time.Time
Start time.Time
End time.Time
}
type PostExecMeta struct {
Context context.Context
TransactionConstructorContext context.Context
Init time.Time
Start time.Time
End time.Time
}
type Listener interface {
PrePing(ctx context.Context) error
PreTxBegin(ctx context.Context, txid uint16) error
PreTxCommit(txid uint16) error
PreTxRollback(txid uint16) error
PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP) error
PreExec(ctx context.Context, txID *uint16, sql *string, params *PP) error
PrePing(ctx context.Context, meta PrePingMeta) error
PreTxBegin(ctx context.Context, txid uint16, meta PreTxBeginMeta) error
PreTxCommit(txid uint16, meta PreTxCommitMeta) error
PreTxRollback(txid uint16, meta PreTxRollbackMeta) error
PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error
PreExec(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error
PostPing(result error)
PostTxBegin(txid uint16, result error)
PostTxCommit(txid uint16, result error)
PostTxRollback(txid uint16, result error)
PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP)
PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP)
PostPing(result error, meta PostPingMeta)
PostTxBegin(txid uint16, result error, meta PostTxBeginMeta)
PostTxCommit(txid uint16, result error, meta PostTxCommitMeta)
PostTxRollback(txid uint16, result error, meta PostTxRollbackMeta)
PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta)
PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta)
}
type genListener struct {
prePing func(ctx context.Context, meta PrePingMeta) error
preTxBegin func(ctx context.Context, txid uint16, meta PreTxBeginMeta) error
preTxCommit func(txid uint16, meta PreTxCommitMeta) error
preTxRollback func(txid uint16, meta PreTxRollbackMeta) error
preQuery func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error
preExec func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error
postPing func(result error, meta PostPingMeta)
postTxBegin func(txid uint16, result error, meta PostTxBeginMeta)
postTxCommit func(txid uint16, result error, meta PostTxCommitMeta)
postTxRollback func(txid uint16, result error, meta PostTxRollbackMeta)
postQuery func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta)
postExec func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta)
}
func (g genListener) PrePing(ctx context.Context, meta PrePingMeta) error {
if g.prePing != nil {
return g.prePing(ctx, meta)
} else {
return nil
}
}
func (g genListener) PreTxBegin(ctx context.Context, txid uint16, meta PreTxBeginMeta) error {
if g.preTxBegin != nil {
return g.preTxBegin(ctx, txid, meta)
} else {
return nil
}
}
func (g genListener) PreTxCommit(txid uint16, meta PreTxCommitMeta) error {
if g.preTxCommit != nil {
return g.preTxCommit(txid, meta)
} else {
return nil
}
}
func (g genListener) PreTxRollback(txid uint16, meta PreTxRollbackMeta) error {
if g.preTxRollback != nil {
return g.preTxRollback(txid, meta)
} else {
return nil
}
}
func (g genListener) PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error {
if g.preQuery != nil {
return g.preQuery(ctx, txID, sql, params, meta)
} else {
return nil
}
}
func (g genListener) PreExec(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error {
if g.preExec != nil {
return g.preExec(ctx, txID, sql, params, meta)
} else {
return nil
}
}
func (g genListener) PostPing(result error, meta PostPingMeta) {
if g.postPing != nil {
g.postPing(result, meta)
}
}
func (g genListener) PostTxBegin(txid uint16, result error, meta PostTxBeginMeta) {
if g.postTxBegin != nil {
g.postTxBegin(txid, result, meta)
}
}
func (g genListener) PostTxCommit(txid uint16, result error, meta PostTxCommitMeta) {
if g.postTxCommit != nil {
g.postTxCommit(txid, result, meta)
}
}
func (g genListener) PostTxRollback(txid uint16, result error, meta PostTxRollbackMeta) {
if g.postTxRollback != nil {
g.postTxRollback(txid, result, meta)
}
}
func (g genListener) PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta) {
if g.postQuery != nil {
g.postQuery(txID, sqlOriginal, sqlReal, params, result, meta)
}
}
func (g genListener) PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta) {
if g.postExec != nil {
g.postExec(txID, sqlOriginal, sqlReal, params, result, meta)
}
}
func NewPrePingListener(f func(ctx context.Context, meta PrePingMeta) error) Listener {
return genListener{prePing: f}
}
func NewPreTxBeginListener(f func(ctx context.Context, txid uint16, meta PreTxBeginMeta) error) Listener {
return genListener{preTxBegin: f}
}
func NewPreTxCommitListener(f func(txid uint16, meta PreTxCommitMeta) error) Listener {
return genListener{preTxCommit: f}
}
func NewPreTxRollbackListener(f func(txid uint16, meta PreTxRollbackMeta) error) Listener {
return genListener{preTxRollback: f}
}
func NewPreQueryListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error) Listener {
return genListener{preQuery: f}
}
func NewPreExecListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error) Listener {
return genListener{preExec: f}
}
func NewPreListener(f func(ctx context.Context, cmdtype string, txID *uint16, sql *string, params *PP) error) Listener {
return genListener{
preExec: func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error {
return f(ctx, "EXEC", txID, sql, params)
},
preQuery: func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error {
return f(ctx, "QUERY", txID, sql, params)
},
}
}
func NewPostPingListener(f func(result error, meta PostPingMeta)) Listener {
return genListener{postPing: f}
}
func NewPostTxBeginListener(f func(txid uint16, result error, meta PostTxBeginMeta)) Listener {
return genListener{postTxBegin: f}
}
func NewPostTxCommitListener(f func(txid uint16, result error, meta PostTxCommitMeta)) Listener {
return genListener{postTxCommit: f}
}
func NewPostTxRollbackListener(f func(txid uint16, result error, meta PostTxRollbackMeta)) Listener {
return genListener{postTxRollback: f}
}
func NewPostQueryListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta)) Listener {
return genListener{postQuery: f}
}
func NewPostExecListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta)) Listener {
return genListener{postExec: f}
}
func NewPostListener(f func(cmdtype string, txID *uint16, sqlOriginal string, sqlReal string, result error, params PP)) Listener {
return genListener{
postExec: func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta) {
f("EXEC", txID, sqlOriginal, sqlReal, result, params)
},
postQuery: func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta) {
f("QUERY", txID, sqlOriginal, sqlReal, result, params)
},
}
}
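
Since Listener now carries quite a few methods, the genListener-backed constructors make one-off hooks cheap. A hedged sketch of a slow-query logger built from NewPostQueryListener, using the Start/End timestamps carried in PostQueryMeta; the threshold and log output are illustrative choices:

package dbobs

import (
    "log"
    "time"

    "github.com/jmoiron/sqlx"
    "gogs.mikescher.com/BlackForestBytes/goext/sq"
)

// SlowQueryLogger reports queries slower than 250ms.
var SlowQueryLogger = sq.NewPostQueryListener(func(txID *uint16, sqlOriginal string, sqlReal string, params sq.PP, result error, meta sq.PostQueryMeta) {
    if dur := meta.End.Sub(meta.Start); dur > 250*time.Millisecond {
        log.Printf("slow query (%s, err=%v): %s", dur, result, sqlReal)
    }
})

// NewObservedDB wires the listener into a freshly constructed handle.
func NewObservedDB(xdb *sqlx.DB) sq.DB {
    db := sq.NewDB(xdb, sq.DBOptions{})
    db.AddListener(SlowQueryLogger)
    return db
}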

View File

@@ -9,6 +9,10 @@ import (
)
func Paginate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int) ([]TData, pag.Pagination, error) {
if filter == nil {
filter = NewEmptyPaginateFilter()
}
prepParams := PP{}
sortOrder := filter.Sort()
@@ -90,6 +94,10 @@ func Paginate[TData any](ctx context.Context, q Queryable, table string, filter
}
func Count(ctx context.Context, q Queryable, table string, filter PaginateFilter) (int, error) {
if filter == nil {
filter = NewEmptyPaginateFilter()
}
prepParams := PP{}
filterCond, joinCond, _ := filter.SQL(prepParams)

View File

@@ -333,3 +333,79 @@ func ScanAll[TData any](ctx context.Context, q Queryable, rows *sqlx.Rows, mode
}
return res, nil
}
func IterateAll[TData any](ctx context.Context, q Queryable, rows *sqlx.Rows, mode StructScanMode, sec StructScanSafety, close bool, consumer func(ctx context.Context, v TData) error) (int, error) {
var strscan *StructScanner
if sec == Safe {
strscan = NewStructScanner(rows, false)
var data TData
err := strscan.Start(&data)
if err != nil {
return 0, err
}
} else if sec == Unsafe {
strscan = NewStructScanner(rows, true)
var data TData
err := strscan.Start(&data)
if err != nil {
return 0, err
}
} else {
return 0, errors.New("unknown value for <sec>")
}
rcount := 0
for rows.Next() {
if err := ctx.Err(); err != nil {
return rcount, err
}
if mode == SModeFast {
var data TData
err := strscan.StructScanBase(&data)
if err != nil {
return rcount, err
}
err = consumer(ctx, data)
if err != nil {
return rcount, exerr.Wrap(err, "").Build()
}
rcount++
} else if mode == SModeExtended {
var data TData
err := strscan.StructScanExt(q, &data)
if err != nil {
return rcount, err
}
err = consumer(ctx, data)
if err != nil {
return rcount, exerr.Wrap(err, "").Build()
}
rcount++
} else {
return rcount, errors.New("unknown value for <mode>")
}
}
if close {
err := strscan.rows.Close()
if err != nil {
return rcount, err
}
}
if err := rows.Err(); err != nil {
return rcount, err
}
return rcount, nil
}

View File

@@ -36,8 +36,7 @@ func TestInsertSingle(t *testing.T) {
xdb := tst.Must(sqlx.Open("sqlite", url))(t)
db := NewDB(xdb)
db.RegisterDefaultConverter()
db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue})
_, err := db.Exec(ctx, `
CREATE TABLE requests (
@@ -90,8 +89,7 @@ func TestUpdateSingle(t *testing.T) {
xdb := tst.Must(sqlx.Open("sqlite", url))(t)
db := NewDB(xdb)
db.RegisterDefaultConverter()
db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue})
_, err := db.Exec(ctx, `
CREATE TABLE requests (
@@ -176,8 +174,7 @@ func TestInsertMultiple(t *testing.T) {
xdb := tst.Must(sqlx.Open("sqlite", url))(t)
db := NewDB(xdb)
db.RegisterDefaultConverter()
db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue})
_, err := db.Exec(ctx, `
CREATE TABLE requests (

View File

@@ -36,8 +36,7 @@ func TestTypeConverter1(t *testing.T) {
xdb := tst.Must(sqlx.Open("sqlite", url))(t)
db := NewDB(xdb)
db.RegisterDefaultConverter()
db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue})
_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{})
tst.AssertNoErr(t, err)
@@ -71,8 +70,7 @@ func TestTypeConverter2(t *testing.T) {
xdb := tst.Must(sqlx.Open("sqlite", url))(t)
db := NewDB(xdb)
db.RegisterDefaultConverter()
db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue})
_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NOT NULL, PRIMARY KEY (id) ) STRICT", PP{})
tst.AssertNoErr(t, err)
@@ -116,8 +114,7 @@ func TestTypeConverter3(t *testing.T) {
xdb := tst.Must(sqlx.Open("sqlite", url))(t)
db := NewDB(xdb)
db.RegisterDefaultConverter()
db := NewDB(xdb, DBOptions{RegisterDefaultConverter: langext.PTrue})
_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NULL, PRIMARY KEY (id) ) STRICT", PP{})
tst.AssertNoErr(t, err)

View File

@@ -6,6 +6,7 @@ import (
"github.com/jmoiron/sqlx"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"time"
)
type TxStatus string
@@ -26,62 +27,78 @@ type Tx interface {
}
type transaction struct {
tx *sqlx.Tx
id uint16
status TxStatus
execCtr int
queryCtr int
db *database
constructorContext context.Context
tx *sqlx.Tx
id uint16
status TxStatus
execCtr int
queryCtr int
db *database
}
func NewTransaction(xtx *sqlx.Tx, txid uint16, db *database) Tx {
func newTransaction(ctx context.Context, xtx *sqlx.Tx, txid uint16, db *database) Tx {
return &transaction{
tx: xtx,
id: txid,
status: TxStatusInitial,
execCtr: 0,
queryCtr: 0,
db: db,
constructorContext: ctx,
tx: xtx,
id: txid,
status: TxStatusInitial,
execCtr: 0,
queryCtr: 0,
db: db,
}
}
func (tx *transaction) Rollback() error {
t0 := time.Now()
preMeta := PreTxRollbackMeta{ConstructorContext: tx.constructorContext}
for _, v := range tx.db.lstr {
err := v.PreTxRollback(tx.id)
err := v.PreTxRollback(tx.id, preMeta)
if err != nil {
return exerr.Wrap(err, "failed to call SQL pre-rollback listener").Int("tx.id", int(tx.id)).Build()
}
}
t1 := time.Now()
result := tx.tx.Rollback()
if result == nil {
tx.status = TxStatusRollback
}
postMeta := PostTxRollbackMeta{ConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now(), ExecCounter: tx.execCtr, QueryCounter: tx.queryCtr}
for _, v := range tx.db.lstr {
v.PostTxRollback(tx.id, result)
v.PostTxRollback(tx.id, result, postMeta)
}
return result
}
func (tx *transaction) Commit() error {
t0 := time.Now()
preMeta := PreTxCommitMeta{ConstructorContext: tx.constructorContext}
for _, v := range tx.db.lstr {
err := v.PreTxCommit(tx.id)
err := v.PreTxCommit(tx.id, preMeta)
if err != nil {
return exerr.Wrap(err, "failed to call SQL pre-commit listener").Int("tx.id", int(tx.id)).Build()
}
}
t1 := time.Now()
result := tx.tx.Commit()
if result == nil {
tx.status = TxStatusComitted
}
postMeta := PostTxCommitMeta{ConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now(), ExecCounter: tx.execCtr, QueryCounter: tx.queryCtr}
for _, v := range tx.db.lstr {
v.PostTxRollback(tx.id, result)
v.PostTxCommit(tx.id, result, postMeta)
}
return result
@@ -89,21 +106,29 @@ func (tx *transaction) Commit() error {
func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Result, error) {
origsql := sqlstr
t0 := time.Now()
preMeta := PreExecMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext}
for _, v := range tx.db.lstr {
err := v.PreExec(ctx, langext.Ptr(tx.id), &sqlstr, &prep)
err := v.PreExec(ctx, langext.Ptr(tx.id), &sqlstr, &prep, preMeta)
if err != nil {
return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
}
t1 := time.Now()
res, err := tx.tx.NamedExecContext(ctx, sqlstr, prep)
tx.execCtr++
if tx.status == TxStatusInitial && err == nil {
tx.status = TxStatusActive
}
postMeta := PostExecMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now()}
for _, v := range tx.db.lstr {
v.PostExec(langext.Ptr(tx.id), origsql, sqlstr, prep)
v.PostExec(langext.Ptr(tx.id), origsql, sqlstr, prep, err, postMeta)
}
if err != nil {
@@ -114,21 +139,29 @@ func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Re
func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Rows, error) {
origsql := sqlstr
t0 := time.Now()
preMeta := PreQueryMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext}
for _, v := range tx.db.lstr {
err := v.PreQuery(ctx, langext.Ptr(tx.id), &sqlstr, &prep)
err := v.PreQuery(ctx, langext.Ptr(tx.id), &sqlstr, &prep, preMeta)
if err != nil {
return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build()
}
}
t1 := time.Now()
rows, err := sqlx.NamedQueryContext(ctx, tx.tx, sqlstr, prep)
tx.queryCtr++
if tx.status == TxStatusInitial && err == nil {
tx.status = TxStatusActive
}
postMeta := PostQueryMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now()}
for _, v := range tx.db.lstr {
v.PostQuery(langext.Ptr(tx.id), origsql, sqlstr, prep)
v.PostQuery(langext.Ptr(tx.id), origsql, sqlstr, prep, err, postMeta)
}
if err != nil {
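
The constructor context and the new exec/query counters surface in the commit/rollback metas delivered to listeners. A hedged caller-side sketch - it assumes BeginTransaction is exposed on the sq.DB interface as the *database implementation above suggests, and the table name is a placeholder:

package dbobs

import (
    "context"
    "database/sql"

    "gogs.mikescher.com/BlackForestBytes/goext/sq"
)

func DeleteAllRequests(ctx context.Context, db sq.DB) error {
    tx, err := db.BeginTransaction(ctx, sql.LevelDefault)
    if err != nil {
        return err
    }

    if _, err := tx.Exec(ctx, "DELETE FROM requests", sq.PP{}); err != nil {
        _ = tx.Rollback() // PostTxRollbackMeta carries the counters and the constructor ctx
        return err
    }

    // on success, listeners receive PostTxCommitMeta with ExecCounter == 1, QueryCounter == 0
    return tx.Commit()
}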

timeext/diff.go (new file, 44 lines)
View File

@@ -0,0 +1,44 @@
package timeext
import "time"
// YearDifference calculates the difference between two timestamps in years.
// = t1 - t2
// returns a float value
func YearDifference(t1 time.Time, t2 time.Time, tz *time.Location) float64 {
yDelta := float64(t1.Year() - t2.Year())
processT1 := float64(t1.Sub(TimeToYearStart(t1, tz))) / float64(TimeToYearEnd(t1, tz).Sub(TimeToYearStart(t1, tz)))
processT2 := float64(t2.Sub(TimeToYearStart(t2, tz))) / float64(TimeToYearEnd(t2, tz).Sub(TimeToYearStart(t2, tz)))
return yDelta + (processT1 - processT2)
}
// MonthDifference calculates the difference between two timestamps in months.
// = t1 - t2
// returns a float value
func MonthDifference(t1 time.Time, t2 time.Time) float64 {
yDelta := float64(t1.Year() - t2.Year())
mDelta := float64(t1.Month() - t2.Month())
dDelta := float64(0)
t1MonthDays := DaysInMonth(t1)
t2MonthDays := DaysInMonth(t2)
if t2.Year() > t1.Year() || (t2.Year() == t1.Year() && t2.Month() > t1.Month()) {
dDelta -= 1
dDelta += float64(t1MonthDays-t1.Day()) / float64(t1MonthDays)
dDelta += float64(t2.Day()) / float64(t2MonthDays)
} else if t2.Year() < t1.Year() || (t2.Year() == t1.Year() && t2.Month() < t1.Month()) {
dDelta -= 1
dDelta += float64(t1.Day()) / float64(t1MonthDays)
dDelta += float64(t2MonthDays-t2.Day()) / float64(t2MonthDays)
} else {
dDelta += float64(t1.Day()-t2.Day()) / float64(t1MonthDays)
}
return yDelta*12 + mDelta + dDelta
}
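
A small worked example for the two difference helpers above, following their "= t1 - t2" semantics (the test file below checks the same kind of cases); the program itself is illustrative:

package main

import (
    "fmt"
    "time"

    "gogs.mikescher.com/BlackForestBytes/goext/timeext"
)

func main() {
    t1 := time.Date(2022, 3, 1, 0, 0, 0, 0, time.UTC)
    t2 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)

    // same year, month delta 2, equal day-of-month -> exactly 2 months
    fmt.Printf("%.2f\n", timeext.MonthDifference(t1, t2)) // 2.00

    // Jan 1 2020 -> Jan 1 2021 is exactly one year (the sign flips if t1 < t2)
    a := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)
    b := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
    fmt.Printf("%.2f\n", timeext.YearDifference(a, b, time.UTC)) // 1.00
}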

timeext/diff_test.go (new file, 143 lines)
View File

@@ -0,0 +1,143 @@
package timeext
import (
"math"
"testing"
"time"
)
func TestYearDifferenceWithSameYearAndDay(t *testing.T) {
t1 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
expected := 0.0
result := YearDifference(t1, t2, time.UTC)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithOneYearApart(t *testing.T) {
t1 := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
expected := 1.0
result := YearDifference(t1, t2, time.UTC)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithDifferentMonths(t *testing.T) {
t1 := time.Date(2020, 6, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
expected := 0.4166666666666667 // Approximation of 5/12 of a year
result := YearDifference(t1, t2, time.UTC)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceAcrossYears(t *testing.T) {
t1 := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2020, 6, 1, 0, 0, 0, 0, time.UTC)
expected := 0.5833333333333334 // Approximation of 7/12 of a year
result := YearDifference(t1, t2, time.UTC)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithTimezone(t *testing.T) {
tz, _ := time.LoadLocation("America/New_York")
t1 := time.Date(2021, 1, 1, 0, 0, 0, 0, tz)
t2 := time.Date(2020, 6, 1, 0, 0, 0, 0, tz)
expected := 0.5833333333333334 // Same as UTC but ensuring timezone is considered
result := YearDifference(t1, t2, tz)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithNegativeDifference(t *testing.T) {
t1 := time.Date(2020, 1, 1, 0, 0, 0, 0, TimezoneBerlin)
t2 := time.Date(2021, 1, 1, 0, 0, 0, 0, TimezoneBerlin)
expected := -1.0
result := YearDifference(t1, t2, TimezoneBerlin)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestYearDifferenceWithNegativeDifference2(t *testing.T) {
t1 := time.Date(2020, 7, 1, 0, 0, 0, 0, TimezoneBerlin)
t2 := time.Date(2021, 7, 1, 0, 0, 0, 0, TimezoneBerlin)
expected := -1.0
result := YearDifference(t1, t2, TimezoneBerlin)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func epsilonEquals(a, b float64) bool {
epsilon := 0.01
return math.Abs(a-b) < epsilon
}
func TestMonthDifferenceSameDate(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
expected := 0.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceSameMonth(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 1, 31, 0, 0, 0, 0, time.UTC)
expected := 0.967741935483871 // Approximation of 30/31 of a month
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceDifferentMonthsSameYear(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 3, 1, 0, 0, 0, 0, time.UTC)
expected := 2.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceDifferentYears(t *testing.T) {
t1 := time.Date(2021, 12, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 2, 1, 0, 0, 0, 0, time.UTC)
expected := 2.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceT1BeforeT2(t *testing.T) {
t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 6, 1, 0, 0, 0, 0, time.UTC)
expected := 5.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}
func TestMonthDifferenceT1AfterT2(t *testing.T) {
t1 := time.Date(2022, 6, 1, 0, 0, 0, 0, time.UTC)
t2 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC)
expected := -5.0
result := MonthDifference(t2, t1)
if !epsilonEquals(result, expected) {
t.Errorf("Expected %v, got %v", expected, result)
}
}

Some files were not shown because too many files have changed in this diff.