Compare commits
	
		
			245 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 2504ef00a0 | |||
| fc5803493c | |||
| a9295bfabf | |||
| 12fa53d848 | |||
| d2bb362135 | |||
| 9dd81f6bd5 | |||
| d2c04afcd5 | |||
| 62980e1489 | |||
| 59963adf74 | |||
| 194ea4ace5 | |||
| 73b80a66bc | |||
| d8b2d01274 | |||
| bfa8457e95 | |||
| 70106733d9 | |||
| ce7837b9ef | |||
| d0d72167eb | |||
| a55ee1a6ce | |||
| dfc319573c | |||
| 246e555f3f | |||
| c28bc086b2 | |||
| d44e971325 | |||
| fe4cdc48af | |||
| 631006a4e1 | |||
| 567ead8697 | |||
| e4886b4a7d | |||
| dcb5d3d7cd | |||
| 15a639f85a | |||
| 303bd04649 | |||
| 7bda674939 | |||
| 126d4fbd0b | |||
| fed8bccaab | |||
| 47b6a6b508 | |||
| 764ce79a71 | |||
| b876c64ba2 | |||
| 8d52b41f57 | |||
| f47e2a33fe | |||
| 9321938dad | |||
| 3828d601a2 | |||
| 2e713c808d | |||
| 6602f86b43 | |||
| 24d9f0fdc7 | |||
| 8446b2da22 | |||
| 758e5a67b5 | |||
| 678ddd7124 | |||
| 36b71dfaf3 | |||
| 9491b72b8d | |||
| 6c4af4006b | |||
| 8bf3a337cf | |||
| 16146494dc | |||
| b0e443ad99 | |||
| 9955eacf96 | |||
| f0347a9435 | |||
| 7c869c65f3 | |||
| 14f39a9162 | |||
| dcd106c1cd | |||
| b704e2a362 | |||
| 6b4bd5a6f8 | |||
| 6df4f5f2a1 | |||
| 780905ba35 | |||
| c679797765 | |||
| 401aad9fa4 | |||
| 645113d553 | |||
| 4a33986b6a | |||
| c1c8c64c76 | |||
| 0927fdc4d7 | |||
| 102a280dda | |||
| f13384d794 | |||
| 409d6e108d | |||
| ed53f297bd | |||
| 42424f4bc2 | |||
| 9e5b8c5277 | |||
| 9abe28c490 | |||
| 422bbd8593 | |||
| 3956675e04 | |||
| 10c3780b52 | |||
| 8edc067a3b | |||
| 1007f2c834 | |||
| c25da03217 | |||
| 4b55dbaacf | |||
| c399fa42ae | |||
| 9e586f7706 | |||
| 3cc8dccc63 | |||
| 7fedfbca81 | |||
| 3c439ba428 | |||
| ad24f6db44 | |||
| 1869ff3d75 | |||
| 30ce8c4b60 | |||
| 885bb53244 | |||
| 1c7dc1820a | |||
| 7e16e799e4 | |||
| 890e16241d | |||
| b9d0348735 | |||
| b9e9575b9b | |||
| 295a098eb4 | |||
| b69a082bb1 | |||
| a4a8c83d17 | |||
| e952176bb0 | |||
| d99adb203b | |||
| f1f91f4cfa | |||
| 2afb265ea4 | |||
| be24f7a190 | |||
| aae8a706e9 | |||
| 7d64f18f54 | |||
| d08b2e565a | |||
| d29e84894d | |||
| 617298c366 | |||
| 668f308565 | |||
| 240a8ed7aa | |||
| 70de8e8d04 | |||
| d38fa60fbc | |||
| 5fba7e0e2f | |||
| 8757643399 | |||
| 42bd4cf58d | |||
| 413178e2d3 | |||
| 9264a2e99b | |||
| 2a0471fb3d | |||
| 1497c013f9 | |||
| ef78b7467b | |||
| 0eda32b725 | |||
| f9ccafb976 | |||
| 6e90239fef | |||
| 05580c384a | |||
| 3188b951fb | |||
| 6b211d1443 | |||
| b2b9b40792 | |||
| 2f915cb6c1 | |||
| b2b93f570a | |||
| 8247fc4524 | |||
| 5dad44ad09 | |||
| f042183433 | |||
| b0be93a7a0 | |||
| 1c143921e6 | |||
| 68e63a9cf6 | |||
| c3162fec95 | |||
| 1124aa781a | |||
| eef0e9f2aa | |||
| af38b06d22 | |||
| 2fad6340c7 | |||
| 03aa0a2282 | |||
| 358c238f3d | |||
| d65ac8ba2b | |||
| 55d02b8c65 | |||
| 8a3965f666 | |||
| 4aa2f494b1 | |||
| 8f13eb2f16 | |||
| 8f15d42173 | |||
| 07fa21dcca | |||
| e657de7f78 | |||
| c534e998e8 | |||
| 88642770c5 | |||
| 8528b5cb66 | |||
| 5ba84bd8ee | |||
| 1260b2dc77 | |||
| 7d18b913c6 | |||
| d1f9069f2f | |||
| fa6d73301e | |||
| bfe62799d3 | |||
| ede912eb7b | |||
| ff8f128fe8 | |||
| 1971f1396f | |||
| bf6c184d12 | |||
| 770f5c5c64 | |||
| 623c021689 | |||
| afcc89bf9e | |||
| 1672e8f8fd | |||
| 398ed56d32 | |||
| f3ecba3883 | |||
| 45031b05cf | |||
| 7413ea045d | |||
| 62c9a4e734 | |||
| 3a8baaa6d9 | |||
| 498785e213 | |||
| 678f95642c | |||
| dacc97e2ce | |||
| f8c0c0afa0 | |||
| 2fbd5cf965 | |||
| 75f71fe3db | |||
| ab1a1ab6f6 | |||
| 19ee5019ef | |||
| 42b68507f2 | |||
| 9d0047a11e | |||
| 06d81f1682 | |||
| 7b8ab03779 | |||
| 07cbcf5a0a | |||
| da41ec3e84 | |||
| 592fae25af | |||
| 7968460fa2 | |||
| b808c5727c | |||
| 796f7956b8 | |||
| 1e6b92d1d9 | |||
| 0b85fa5af9 | |||
| c3318cc1de | |||
| fbf4d7b915 | |||
| 9cc0abf9e0 | |||
| 7c40bcfd3c | |||
| 05636a1e4d | |||
| 0f52b860ea | |||
| b5cd116219 | |||
| 98486842ae | |||
| 7577a2dd47 | |||
| 08681756b6 | |||
| 64772d0474 | |||
| 127764556e | |||
| 170f43d806 | |||
| 9dffc41274 | |||
| c63cf442f8 | |||
| a2ba283632 | |||
| 4a1fb1ae18 | |||
| a127b24e62 | |||
| 69d6290376 | |||
| c08a739158 | |||
| 5f5f0e44f0 | |||
| 6e6797eac5 | |||
| cd9406900a | |||
| 6c81f7f6bc | |||
| d56a0235af | |||
| de2ca763c1 | |||
| da52bb5c90 | |||
| 3d4afe7b25 | |||
| f5766d639c | |||
| cdf2a6e76b | |||
| 6d7cfb86f8 | |||
| 1e9d663ffe | |||
| 5b8d7ebf87 | |||
| 11dc6d2640 | |||
| 29a3f73f15 | |||
| 98105642fc | |||
| 0fd5f3b417 | |||
| 43cac4b3bb | |||
| cd68af8e66 | |||
| 113d838876 | |||
| 9e5bc0d3ea | |||
| 6d3bd13f61 | |||
| b5ca475b3f | |||
| a75b1291cb | |||
| 21cd1ee066 | |||
| ae43cbb623 | |||
| 9b752a911c | |||
| ec9ac26a4c | |||
| 39a0b73d56 | |||
| 2e2e15d4d2 | |||
| 0d16946aba | |||
| 14441c2378 | |||
| f6bcdc9903 | |||
| a95053211c | 
							
								
								
									
										55
									
								
								.gitea/workflows/tests.yml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										55
									
								
								.gitea/workflows/tests.yml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,55 @@ | |||||||
|  |  | ||||||
|  | # https://docs.gitea.com/next/usage/actions/quickstart | ||||||
|  | # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions | ||||||
|  | # https://docs.github.com/en/actions/learn-github-actions/contexts#github-context | ||||||
|  |  | ||||||
|  | name: Build Docker and Deploy | ||||||
|  | run-name: Build & Deploy ${{ gitea.ref }} on ${{ gitea.actor }} | ||||||
|  |  | ||||||
|  | on: | ||||||
|  |   push: | ||||||
|  |     branches: | ||||||
|  |       - '*' | ||||||
|  |       - '**' | ||||||
|  |  | ||||||
|  |  | ||||||
|  | jobs: | ||||||
|  |   run_tests: | ||||||
|  |     name: Run goext test-suite   | ||||||
|  |     runs-on: bfb-cicd-latest | ||||||
|  |     steps: | ||||||
|  |  | ||||||
|  |       - name: Check out code | ||||||
|  |         uses: actions/checkout@v3 | ||||||
|  |  | ||||||
|  |       - name: Setup go | ||||||
|  |         uses: actions/setup-go@v4 | ||||||
|  |         with: | ||||||
|  |           go-version-file: '${{ gitea.workspace }}/go.mod' | ||||||
|  |  | ||||||
|  |       - name: Setup packages | ||||||
|  |         uses: awalsh128/cache-apt-pkgs-action@latest | ||||||
|  |         with: | ||||||
|  |           packages: curl python3 | ||||||
|  |           version: 1.0 | ||||||
|  |  | ||||||
|  |       - name: go version | ||||||
|  |         run: go version | ||||||
|  |  | ||||||
|  |       - name: Run tests | ||||||
|  |         run: cd "${{ gitea.workspace }}" && make test | ||||||
|  |  | ||||||
|  |       - name: Send failure mail | ||||||
|  |         if: failure() | ||||||
|  |         uses: dawidd6/action-send-mail@v3 | ||||||
|  |         with: | ||||||
|  |           server_address: smtp.fastmail.com | ||||||
|  |           server_port: 465 | ||||||
|  |           secure: true | ||||||
|  |           username: ${{secrets.MAIL_USERNAME}} | ||||||
|  |           password: ${{secrets.MAIL_PASSWORD}} | ||||||
|  |           subject: Pipeline on '${{ gitea.repository }}' failed | ||||||
|  |           to: ${{ steps.commiter_info.outputs.MAIL }} | ||||||
|  |           from: Gitea Actions <gitea_actions@blackforestbytes.de> | ||||||
|  |           body: "Go to https://gogs.blackforestbytes.com/${{ gitea.repository }}/actions" | ||||||
|  |  | ||||||
							
								
								
									
										2
									
								
								.idea/.gitignore
									
									
									
										generated
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.idea/.gitignore
									
									
									
										generated
									
									
										vendored
									
									
								
							| @@ -6,3 +6,5 @@ | |||||||
| # Datasource local storage ignored files | # Datasource local storage ignored files | ||||||
| /dataSources/ | /dataSources/ | ||||||
| /dataSources.local.xml | /dataSources.local.xml | ||||||
|  | # GitHub Copilot persisted chat sessions | ||||||
|  | /copilot/chatSessions | ||||||
|   | |||||||
							
								
								
									
										6
									
								
								.idea/golinter.xml
									
									
									
										generated
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								.idea/golinter.xml
									
									
									
										generated
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | |||||||
|  | <?xml version="1.0" encoding="UTF-8"?> | ||||||
|  | <project version="4"> | ||||||
|  |   <component name="GoLinterSettings"> | ||||||
|  |     <option name="checkGoLinterExe" value="false" /> | ||||||
|  |   </component> | ||||||
|  | </project> | ||||||
							
								
								
									
										6
									
								
								.idea/sqldialects.xml
									
									
									
										generated
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								.idea/sqldialects.xml
									
									
									
										generated
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | |||||||
|  | <?xml version="1.0" encoding="UTF-8"?> | ||||||
|  | <project version="4"> | ||||||
|  |   <component name="SqlDialectMappings"> | ||||||
|  |     <file url="file://$PROJECT_DIR$/sq/sq_test.go" dialect="SQLite" /> | ||||||
|  |   </component> | ||||||
|  | </project> | ||||||
							
								
								
									
										8
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										8
									
								
								Makefile
									
									
									
									
									
								
							| @@ -5,7 +5,13 @@ run: | |||||||
| test: | test: | ||||||
| 	# go test ./... | 	# go test ./... | ||||||
| 	which gotestsum || go install gotest.tools/gotestsum@latest | 	which gotestsum || go install gotest.tools/gotestsum@latest | ||||||
| 	gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./test" | 	gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./..." | ||||||
|  |  | ||||||
|  | test-in-docker: | ||||||
|  | 	tag="goext_temp_test_image:$(shell uuidgen | tr -d '-')";        \ | ||||||
|  | 	docker build --tag $$tag . -f .gitea/workflows/Dockerfile_tests; \ | ||||||
|  | 	docker run --rm $$tag;                                           \ | ||||||
|  | 	docker rmi $$tag | ||||||
|  |  | ||||||
| version: | version: | ||||||
| 	_data/version.sh | 	_data/version.sh | ||||||
							
								
								
									
										10
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										10
									
								
								README.md
									
									
									
									
									
								
							| @@ -11,24 +11,26 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"` | |||||||
| ### Packages: | ### Packages: | ||||||
|  |  | ||||||
| | Name        | Maintainer | Description                                                                                                   | | | Name        | Maintainer | Description                                                                                                   | | ||||||
| |--------------|------------|---------------------------------------------------------------------------------------------------------------| | |-------------|------------|---------------------------------------------------------------------------------------------------------------| | ||||||
| | langext     | Mike       | General uttility/helper functions, (everything thats missing from go standard library)                        | | | langext     | Mike       | General uttility/helper functions, (everything thats missing from go standard library)                        | | ||||||
| | mathext     | Mike       | Utility/Helper functions for math                                                                             | | | mathext     | Mike       | Utility/Helper functions for math                                                                             | | ||||||
| | cryptext    | Mike       | Utility/Helper functions for encryption                                                                       | | | cryptext    | Mike       | Utility/Helper functions for encryption                                                                       | | ||||||
| | syncext     | Mike       | Utility/Helper funtions for multi-threading / mutex / channels                                                | | | syncext     | Mike       | Utility/Helper funtions for multi-threading / mutex / channels                                                | | ||||||
| | dataext     | Mike       | Various useful data structures                                                                                | | | dataext     | Mike       | Various useful data structures                                                                                | | ||||||
| | zipext      | Mike       | Utility for zip/gzip/tar etc                                                                                  | | | zipext      | Mike       | Utility for zip/gzip/tar etc                                                                                  | | ||||||
| | reflectext   | Mike       | Utility for golagn reflection                                                                                 | | | reflectext  | Mike       | Utility for golang reflection                                                                                 | | ||||||
|  | | fsext       | Mike       | Utility for filesytem access                                                                                  | | ||||||
| |             |            |                                                                                                               | | |             |            |                                                                                                               | | ||||||
| | mongoext    | Mike       | Utility/Helper functions for mongodb                                                                          | | | mongoext    | Mike       | Utility/Helper functions for mongodb                                                                          | | ||||||
| | cursortoken | Mike       | MongoDB cursortoken implementation                                                                            | | | cursortoken | Mike       | MongoDB cursortoken implementation                                                                            | | ||||||
|  | | pagination  | Mike       | Pagination implementation                                                                                     | | ||||||
| |             |            |                                                                                                               | | |             |            |                                                                                                               | | ||||||
| | totpext     | Mike       | Implementation of TOTP (2-Factor-Auth)                                                                        | | | totpext     | Mike       | Implementation of TOTP (2-Factor-Auth)                                                                        | | ||||||
| | termext     | Mike       | Utilities for terminals (mostly color output)                                                                 | | | termext     | Mike       | Utilities for terminals (mostly color output)                                                                 | | ||||||
| | confext     | Mike       | Parses environment configuration into structs                                                                 | | | confext     | Mike       | Parses environment configuration into structs                                                                 | | ||||||
| | cmdext      | Mike       | Runner for external commands/processes                                                                        | | | cmdext      | Mike       | Runner for external commands/processes                                                                        | | ||||||
| |             |            |                                                                                                               | | |             |            |                                                                                                               | | ||||||
| | sq           | Mike       | Utility functions for sql based databases                                                                     | | | sq          | Mike       | Utility functions for sql based databases (primarily sqlite)                                                  | | ||||||
| | tst         | Mike       | Utility functions for unit tests                                                                              | | | tst         | Mike       | Utility functions for unit tests                                                                              | | ||||||
| |             |            |                                                                                                               | | |             |            |                                                                                                               | | ||||||
| | rfctime     | Mike       | Classes for time seriallization, with different marshallign method for mongo and json                         | | | rfctime     | Mike       | Classes for time seriallization, with different marshallign method for mongo and json                         | | ||||||
| @@ -39,3 +41,5 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"` | |||||||
| | rext        | Mike       | Regex Wrapper, wraps regexp with a better interface                                                           | | | rext        | Mike       | Regex Wrapper, wraps regexp with a better interface                                                           | | ||||||
| | wmo         | Mike       | Mongo Wrapper, wraps mongodb with a better interface                                                          | | | wmo         | Mike       | Mongo Wrapper, wraps mongodb with a better interface                                                          | | ||||||
| |             |            |                                                                                                               | | |             |            |                                                                                                               | | ||||||
|  | | scn         | Mike       | SimpleCloudNotifier                                                                                           | | ||||||
|  | |             |            |                                                                                                               | | ||||||
							
								
								
									
										12
									
								
								TODO.md
									
									
									
									
									
								
							
							
						
						
									
										12
									
								
								TODO.md
									
									
									
									
									
								
							| @@ -2,12 +2,8 @@ | |||||||
|  |  | ||||||
|  - cronext |  - cronext | ||||||
|  |  | ||||||
|  - cursortoken |  - rfctime.HMSTimeOnly | ||||||
|  |  - rfctime.NanoTimeOnly | ||||||
|  |  | ||||||
|  - typed/geenric mongo wrapper |  - remove sqlx dependency from sq  (unmaintained, and mostly superseeded by our own stuff?) | ||||||
|  |  - Move DBLogger and DBPreprocessor to sq | ||||||
|  - error package |  | ||||||
|  |  | ||||||
| - rfctime.DateOnly |  | ||||||
| - rfctime.HMSTimeOnly |  | ||||||
| - rfctime.NanoTimeOnly |  | ||||||
							
								
								
									
										
											BIN
										
									
								
								bfcodegen/_test_example_1.tgz
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								bfcodegen/_test_example_1.tgz
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								bfcodegen/_test_example_2.tgz
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								bfcodegen/_test_example_2.tgz
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										197
									
								
								bfcodegen/csid-generate.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										197
									
								
								bfcodegen/csid-generate.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,197 @@ | |||||||
|  | package bfcodegen | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	_ "embed" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go/format" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
|  | 	"io" | ||||||
|  | 	"os" | ||||||
|  | 	"path" | ||||||
|  | 	"path/filepath" | ||||||
|  | 	"regexp" | ||||||
|  | 	"strings" | ||||||
|  | 	"text/template" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type CSIDDef struct { | ||||||
|  | 	File         string | ||||||
|  | 	FileRelative string | ||||||
|  | 	Name         string | ||||||
|  | 	Prefix       string | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type CSIDGenOptions struct { | ||||||
|  | 	DebugOutput *bool | ||||||
|  | } | ||||||
|  |  | ||||||
|  | var rexCSIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||||
|  |  | ||||||
|  | var rexCSIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@csid:type)\s+\[(?P<prefix>[A-Z0-9]{3})].*$`)) | ||||||
|  |  | ||||||
|  | var rexCSIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumCharsetIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | ||||||
|  |  | ||||||
|  | //go:embed csid-generate.template | ||||||
|  | var templateCSIDGenerateText string | ||||||
|  |  | ||||||
|  | func GenerateCharsetIDSpecs(sourceDir string, destFile string, opt CSIDGenOptions) error { | ||||||
|  |  | ||||||
|  | 	debugOutput := langext.Coalesce(opt.DebugOutput, false) | ||||||
|  |  | ||||||
|  | 	files, err := os.ReadDir(sourceDir) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	oldChecksum := "N/A" | ||||||
|  | 	if _, err := os.Stat(destFile); !os.IsNotExist(err) { | ||||||
|  | 		content, err := os.ReadFile(destFile) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return err | ||||||
|  | 		} | ||||||
|  | 		if m, ok := rexCSIDChecksumConst.MatchFirst(string(content)); ok { | ||||||
|  | 			oldChecksum = m.GroupByName("cs").Value() | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) }) | ||||||
|  | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") }) | ||||||
|  | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") }) | ||||||
|  | 	langext.SortBy(files, func(v os.DirEntry) string { return v.Name() }) | ||||||
|  |  | ||||||
|  | 	newChecksumStr := goext.GoextVersion | ||||||
|  | 	for _, f := range files { | ||||||
|  | 		content, err := os.ReadFile(path.Join(sourceDir, f.Name())) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return err | ||||||
|  | 		} | ||||||
|  | 		newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	newChecksum := cryptext.BytesSha256([]byte(newChecksumStr)) | ||||||
|  |  | ||||||
|  | 	if newChecksum != oldChecksum { | ||||||
|  | 		fmt.Printf("[CSIDGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum) | ||||||
|  | 	} else { | ||||||
|  | 		fmt.Printf("[CSIDGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	allIDs := make([]CSIDDef, 0) | ||||||
|  |  | ||||||
|  | 	pkgname := "" | ||||||
|  |  | ||||||
|  | 	for _, f := range files { | ||||||
|  | 		if debugOutput { | ||||||
|  | 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		fileIDs, pn, err := processCSIDFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return err | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if debugOutput { | ||||||
|  | 			fmt.Printf("\n") | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		allIDs = append(allIDs, fileIDs...) | ||||||
|  |  | ||||||
|  | 		if pn != "" { | ||||||
|  | 			pkgname = pn | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if pkgname == "" { | ||||||
|  | 		return errors.New("no package name found in any file") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	fdata, err := format.Source([]byte(fmtCSIDOutput(newChecksum, allIDs, pkgname))) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = os.WriteFile(destFile, fdata, 0o755) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func processCSIDFile(basedir string, fn string, debugOutput bool) ([]CSIDDef, string, error) { | ||||||
|  | 	file, err := os.Open(fn) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	defer func() { _ = file.Close() }() | ||||||
|  |  | ||||||
|  | 	bin, err := io.ReadAll(file) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	lines := strings.Split(string(bin), "\n") | ||||||
|  |  | ||||||
|  | 	ids := make([]CSIDDef, 0) | ||||||
|  |  | ||||||
|  | 	pkgname := "" | ||||||
|  |  | ||||||
|  | 	for i, line := range lines { | ||||||
|  | 		if i == 0 && strings.HasPrefix(line, "// Code generated by") { | ||||||
|  | 			break | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if match, ok := rexCSIDPackage.MatchFirst(line); i == 0 && ok { | ||||||
|  | 			pkgname = match.GroupByName("name").Value() | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if match, ok := rexCSIDDef.MatchFirst(line); ok { | ||||||
|  |  | ||||||
|  | 			rfp, err := filepath.Rel(basedir, fn) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return nil, "", err | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			def := CSIDDef{ | ||||||
|  | 				File:         fn, | ||||||
|  | 				FileRelative: rfp, | ||||||
|  | 				Name:         match.GroupByName("name").Value(), | ||||||
|  | 				Prefix:       match.GroupByName("prefix").Value(), | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			if debugOutput { | ||||||
|  | 				fmt.Printf("Found ID definition { '%s' }\n", def.Name) | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			ids = append(ids, def) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return ids, pkgname, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func fmtCSIDOutput(cs string, ids []CSIDDef, pkgname string) string { | ||||||
|  | 	templ := template.Must(template.New("csid-generate").Parse(templateCSIDGenerateText)) | ||||||
|  |  | ||||||
|  | 	buffer := bytes.Buffer{} | ||||||
|  |  | ||||||
|  | 	err := templ.Execute(&buffer, langext.H{ | ||||||
|  | 		"PkgName":      pkgname, | ||||||
|  | 		"Checksum":     cs, | ||||||
|  | 		"GoextVersion": goext.GoextVersion, | ||||||
|  | 		"IDs":          ids, | ||||||
|  | 	}) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return buffer.String() | ||||||
|  | } | ||||||
							
								
								
									
										190
									
								
								bfcodegen/csid-generate.template
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										190
									
								
								bfcodegen/csid-generate.template
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,190 @@ | |||||||
|  | // Code generated by csid-generate.go DO NOT EDIT. | ||||||
|  |  | ||||||
|  | package {{.PkgName}} | ||||||
|  |  | ||||||
|  | import "crypto/rand" | ||||||
|  | import "crypto/sha256" | ||||||
|  | import "fmt" | ||||||
|  | import "github.com/go-playground/validator/v10" | ||||||
|  | import "github.com/rs/zerolog/log" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
|  | import "math/big" | ||||||
|  | import "reflect" | ||||||
|  | import "regexp" | ||||||
|  | import "strings" | ||||||
|  |  | ||||||
|  | const ChecksumCharsetIDGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | ||||||
|  |  | ||||||
|  | const idlen = 24 | ||||||
|  |  | ||||||
|  | const checklen = 1 | ||||||
|  |  | ||||||
|  | const idCharset = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" | ||||||
|  | const idCharsetLen = len(idCharset) | ||||||
|  |  | ||||||
|  | var charSetReverseMap = generateCharsetMap() | ||||||
|  |  | ||||||
|  | const ({{range .IDs}} | ||||||
|  | 	prefix{{.Name}} = "{{.Prefix}}" {{end}} | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | var ({{range .IDs}} | ||||||
|  | 	regex{{.Name}} = generateRegex(prefix{{.Name}}) {{end}} | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func generateRegex(prefix string) rext.Regex { | ||||||
|  | 	return rext.W(regexp.MustCompile(fmt.Sprintf("^%s[%s]{%d}[%s]{%d}$", prefix, idCharset, idlen-len(prefix)-checklen, idCharset, checklen))) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func generateCharsetMap() []int { | ||||||
|  | 	result := make([]int, 128) | ||||||
|  | 	for i := 0; i < len(result); i++ { | ||||||
|  | 		result[i] = -1 | ||||||
|  | 	} | ||||||
|  | 	for idx, chr := range idCharset { | ||||||
|  | 		result[int(chr)] = idx | ||||||
|  | 	} | ||||||
|  | 	return result | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func generateID(prefix string) string { | ||||||
|  | 	k := "" | ||||||
|  | 	csMax := big.NewInt(int64(idCharsetLen)) | ||||||
|  | 	checksum := 0 | ||||||
|  | 	for i := 0; i < idlen-len(prefix)-checklen; i++ { | ||||||
|  | 		v, err := rand.Int(rand.Reader, csMax) | ||||||
|  | 		if err != nil { | ||||||
|  | 			panic(err) | ||||||
|  | 		} | ||||||
|  | 		v64 := v.Int64() | ||||||
|  | 		k += string(idCharset[v64]) | ||||||
|  | 		checksum = (checksum + int(v64)) % (idCharsetLen) | ||||||
|  | 	} | ||||||
|  | 	checkstr := string(idCharset[checksum%idCharsetLen]) | ||||||
|  | 	return prefix + k + checkstr | ||||||
|  | } | ||||||
|  |  | ||||||
// generateIDFromSeed deterministically derives a CSID from the given seed
// string: the same (prefix, seed) pair always yields the same ID.
//
// The payload is produced by repeatedly feeding the seed into a single
// sha256 hasher and base-converting each cumulative digest into idCharset
// characters until enough characters are available. The final character is
// a checksum over the payload, matching the scheme used by generateID and
// checked by validateID.
func generateIDFromSeed(prefix string, seed string) string {
	h := sha256.New()

	// Accumulate at least idlen-len(prefix)-checklen payload characters.
	// Each iteration writes the seed into the SAME hasher again, so every
	// digest (and therefore every appended chunk) differs from the last.
	iddata := ""
	for len(iddata) < idlen-len(prefix)-checklen {
		h.Write([]byte(seed))
		bs := h.Sum(nil)
		iddata += langext.NewAnyBaseConverter(idCharset).Encode(bs)
	}

	// Checksum over exactly the payload characters that end up in the ID
	// (iddata may be longer than needed; the excess is discarded below).
	checksum := 0
	for i := 0; i < idlen-len(prefix)-checklen; i++ {
		ichr := int(iddata[i])
		checksum = (checksum + charSetReverseMap[ichr]) % (idCharsetLen)
	}

	checkstr := string(idCharset[checksum%idCharsetLen])

	return prefix + iddata[:(idlen-len(prefix)-checklen)] + checkstr
}
|  |  | ||||||
|  | func validateID(prefix string, value string) error { | ||||||
|  | 	if len(value) != idlen { | ||||||
|  | 		return exerr.New(exerr.TypeInvalidCSID, "id has the wrong length").Str("value", value).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if !strings.HasPrefix(value, prefix) { | ||||||
|  | 		return exerr.New(exerr.TypeInvalidCSID, "id is missing the correct prefix").Str("value", value).Str("prefix", prefix).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	checksum := 0 | ||||||
|  | 	for i := len(prefix); i < len(value)-checklen; i++ { | ||||||
|  | 		ichr := int(value[i]) | ||||||
|  | 		if ichr < 0 || ichr >= len(charSetReverseMap) || charSetReverseMap[ichr] == -1 { | ||||||
|  | 			return exerr.New(exerr.TypeInvalidCSID, "id contains invalid characters").Str("value", value).Build() | ||||||
|  | 		} | ||||||
|  | 		checksum = (checksum + charSetReverseMap[ichr]) % (idCharsetLen) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	checkstr := string(idCharset[checksum%idCharsetLen]) | ||||||
|  |  | ||||||
|  | 	if !strings.HasSuffix(value, checkstr) { | ||||||
|  | 		return exerr.New(exerr.TypeInvalidCSID, "id checkstring is invalid").Str("value", value).Str("checkstr", checkstr).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func getRawData(prefix string, value string) string { | ||||||
|  | 	if len(value) != idlen { | ||||||
|  | 		return "" | ||||||
|  | 	} | ||||||
|  | 	return value[len(prefix) : idlen-checklen] | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func getCheckString(prefix string, value string) string { | ||||||
|  | 	if len(value) != idlen { | ||||||
|  | 		return "" | ||||||
|  | 	} | ||||||
|  | 	return value[idlen-checklen:] | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ValidateEntityID(vfl validator.FieldLevel) bool { | ||||||
|  | 	if !vfl.Field().CanInterface() { | ||||||
|  | 		log.Error().Msgf("Failed to validate EntityID (cannot interface ?!?)") | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	ifvalue := vfl.Field().Interface() | ||||||
|  |  | ||||||
|  | 	if value1, ok := ifvalue.(EntityID); ok { | ||||||
|  |  | ||||||
|  | 		if vfl.Field().Type().Kind() == reflect.Pointer && langext.IsNil(value1) { | ||||||
|  | 			return true | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if err := value1.Valid(); err != nil { | ||||||
|  | 			log.Debug().Msgf("Failed to validate EntityID '%s' (%s)", value1.String(), err.Error()) | ||||||
|  | 			return false | ||||||
|  | 		} else { | ||||||
|  | 			return true | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  | 		log.Error().Msgf("Failed to validate EntityID (wrong type: %T)", ifvalue) | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{range .IDs}} | ||||||
|  |  | ||||||
|  | // ================================ {{.Name}} ({{.FileRelative}}) ================================ | ||||||
|  |  | ||||||
|  | func New{{.Name}}() {{.Name}} { | ||||||
|  | 	return {{.Name}}(generateID(prefix{{.Name}})) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (id {{.Name}}) Valid() error { | ||||||
|  | 	return validateID(prefix{{.Name}}, string(id)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) String() string { | ||||||
|  | 	return string(i) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) Prefix() string { | ||||||
|  | 	return prefix{{.Name}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (id {{.Name}}) Raw() string { | ||||||
|  | 	return getRawData(prefix{{.Name}}, string(id)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (id {{.Name}}) CheckString() string { | ||||||
|  | 	return getCheckString(prefix{{.Name}}, string(id)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (id {{.Name}}) Regex() rext.Regex { | ||||||
|  | 	return regex{{.Name}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{end}} | ||||||
							
								
								
									
										52
									
								
								bfcodegen/csid-generate_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										52
									
								
								bfcodegen/csid-generate_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,52 @@ | |||||||
|  | package bfcodegen | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	_ "embed" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"os" | ||||||
|  | 	"path/filepath" | ||||||
|  | 	"testing" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | //go:embed _test_example_1.tgz | ||||||
|  | var CSIDExampleModels1 []byte | ||||||
|  |  | ||||||
|  | func TestGenerateCSIDSpecs(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz") | ||||||
|  |  | ||||||
|  | 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | ||||||
|  |  | ||||||
|  | 	err := os.WriteFile(tmpFile, CSIDExampleModels1, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	err = os.Mkdir(tmpDir, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go", CSIDGenOptions{DebugOutput: langext.PTrue}) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	err = GenerateCharsetIDSpecs(tmpDir, tmpDir+"/csid_gen.go", CSIDGenOptions{DebugOutput: langext.PTrue}) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/csid_gen.go"))(t))) | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | } | ||||||
| @@ -1,10 +1,13 @@ | |||||||
| package bfcodegen | package bfcodegen | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	_ "embed" | ||||||
|  | 	"encoding/json" | ||||||
| 	"errors" | 	"errors" | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  | 	"go/format" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext" | 	"gogs.mikescher.com/BlackForestBytes/goext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
| @@ -12,15 +15,18 @@ import ( | |||||||
| 	"os" | 	"os" | ||||||
| 	"path" | 	"path" | ||||||
| 	"path/filepath" | 	"path/filepath" | ||||||
|  | 	"reflect" | ||||||
| 	"regexp" | 	"regexp" | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"time" | 	"text/template" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type EnumDefVal struct { | type EnumDefVal struct { | ||||||
| 	VarName     string | 	VarName     string | ||||||
| 	Value       string | 	Value       string | ||||||
| 	Description *string | 	Description *string | ||||||
|  | 	Data        *map[string]any | ||||||
|  | 	RawComment  *string | ||||||
| } | } | ||||||
|  |  | ||||||
| type EnumDef struct { | type EnumDef struct { | ||||||
| @@ -31,20 +37,23 @@ type EnumDef struct { | |||||||
| 	Values       []EnumDefVal | 	Values       []EnumDefVal | ||||||
| } | } | ||||||
|  |  | ||||||
| var rexPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | type EnumGenOptions struct { | ||||||
|  | 	DebugOutput *bool | ||||||
|  | 	GoFormat    *bool | ||||||
|  | } | ||||||
|  |  | ||||||
|  | var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||||
|  |  | ||||||
| var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | ||||||
|  |  | ||||||
| var rexValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:]+"|[0-9]+))\s*(//(?P<descr>.*))?.*$`)) | var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-.]+"|[0-9]+))\s*(//(?P<comm>.*))?.*$`)) | ||||||
|  |  | ||||||
| var rexChecksumConst = rext.W(regexp.MustCompile(`const ChecksumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | ||||||
|  |  | ||||||
| func GenerateEnumSpecs(sourceDir string, destFile string) error { | //go:embed enum-generate.template | ||||||
|  | var templateEnumGenerateText string | ||||||
|  |  | ||||||
| 	files, err := os.ReadDir(sourceDir) | func GenerateEnumSpecs(sourceDir string, destFile string, opt EnumGenOptions) error { | ||||||
| 	if err != nil { |  | ||||||
| 		return err |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	oldChecksum := "N/A" | 	oldChecksum := "N/A" | ||||||
| 	if _, err := os.Stat(destFile); !os.IsNotExist(err) { | 	if _, err := os.Stat(destFile); !os.IsNotExist(err) { | ||||||
| @@ -52,20 +61,45 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return err | 			return err | ||||||
| 		} | 		} | ||||||
| 		if m, ok := rexChecksumConst.MatchFirst(string(content)); ok { | 		if m, ok := rexEnumChecksumConst.MatchFirst(string(content)); ok { | ||||||
| 			oldChecksum = m.GroupByName("cs").Value() | 			oldChecksum = m.GroupByName("cs").Value() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	gocode, _, changed, err := _generateEnumSpecs(sourceDir, destFile, oldChecksum, langext.Coalesce(opt.GoFormat, true), langext.Coalesce(opt.DebugOutput, false)) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if !changed { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = os.WriteFile(destFile, []byte(gocode), 0o755) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, gofmt bool, debugOutput bool) (string, string, bool, error) { | ||||||
|  |  | ||||||
|  | 	files, err := os.ReadDir(sourceDir) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", "", false, err | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) }) | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) }) | ||||||
| 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") }) | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") }) | ||||||
|  | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") }) | ||||||
| 	langext.SortBy(files, func(v os.DirEntry) string { return v.Name() }) | 	langext.SortBy(files, func(v os.DirEntry) string { return v.Name() }) | ||||||
|  |  | ||||||
| 	newChecksumStr := goext.GoextVersion | 	newChecksumStr := goext.GoextVersion | ||||||
| 	for _, f := range files { | 	for _, f := range files { | ||||||
| 		content, err := os.ReadFile(path.Join(sourceDir, f.Name())) | 		content, err := os.ReadFile(path.Join(sourceDir, f.Name())) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return err | 			return "", "", false, err | ||||||
| 		} | 		} | ||||||
| 		newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content) | 		newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content) | ||||||
| 	} | 	} | ||||||
| @@ -76,7 +110,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 		fmt.Printf("[EnumGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum) | 		fmt.Printf("[EnumGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum) | ||||||
| 	} else { | 	} else { | ||||||
| 		fmt.Printf("[EnumGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum) | 		fmt.Printf("[EnumGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum) | ||||||
| 		return nil | 		return "", oldChecksum, false, nil | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	allEnums := make([]EnumDef, 0) | 	allEnums := make([]EnumDef, 0) | ||||||
| @@ -84,13 +118,18 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 	pkgname := "" | 	pkgname := "" | ||||||
|  |  | ||||||
| 	for _, f := range files { | 	for _, f := range files { | ||||||
|  | 		if debugOutput { | ||||||
| 			fmt.Printf("========= %s =========\n\n", f.Name()) | 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||||
| 		fileEnums, pn, err := processFile(sourceDir, path.Join(sourceDir, f.Name())) |  | ||||||
| 		if err != nil { |  | ||||||
| 			return err |  | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|  | 		fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return "", "", false, err | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if debugOutput { | ||||||
| 			fmt.Printf("\n") | 			fmt.Printf("\n") | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		allEnums = append(allEnums, fileEnums...) | 		allEnums = append(allEnums, fileEnums...) | ||||||
|  |  | ||||||
| @@ -100,32 +139,24 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error { | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if pkgname == "" { | 	if pkgname == "" { | ||||||
| 		return errors.New("no package name found in any file") | 		return "", "", false, errors.New("no package name found in any file") | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	err = os.WriteFile(destFile, []byte(fmtOutput(newChecksum, allEnums, pkgname)), 0o755) | 	rdata := fmtEnumOutput(newChecksum, allEnums, pkgname) | ||||||
|  |  | ||||||
|  | 	if !gofmt { | ||||||
|  | 		return rdata, newChecksum, true, nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	fdata, err := format.Source([]byte(rdata)) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return "", "", false, err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	res, err := cmdext.RunCommand("go", []string{"fmt", destFile}, langext.Ptr(2*time.Second)) | 	return string(fdata), newChecksum, true, nil | ||||||
| 	if err != nil { |  | ||||||
| 		return err |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	if res.CommandTimedOut { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt timed out") |  | ||||||
| 	} |  | ||||||
| 	if res.ExitCode != 0 { |  | ||||||
| 		fmt.Println(res.StdCombined) |  | ||||||
| 		return errors.New("go fmt did not succeed") |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return nil |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func processFile(basedir string, fn string) ([]EnumDef, string, error) { | func processEnumFile(basedir string, fn string, debugOutput bool) ([]EnumDef, string, error) { | ||||||
| 	file, err := os.Open(fn) | 	file, err := os.Open(fn) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, "", err | 		return nil, "", err | ||||||
| @@ -149,7 +180,7 @@ func processFile(basedir string, fn string) ([]EnumDef, string, error) { | |||||||
| 			break | 			break | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		if match, ok := rexPackage.MatchFirst(line); i == 0 && ok { | 		if match, ok := rexEnumPackage.MatchFirst(line); i == 0 && ok { | ||||||
| 			pkgname = match.GroupByName("name").Value() | 			pkgname = match.GroupByName("name").Value() | ||||||
| 			continue | 			continue | ||||||
| 		} | 		} | ||||||
| @@ -169,15 +200,42 @@ func processFile(basedir string, fn string) ([]EnumDef, string, error) { | |||||||
| 				Values:       make([]EnumDefVal, 0), | 				Values:       make([]EnumDefVal, 0), | ||||||
| 			} | 			} | ||||||
| 			enums = append(enums, def) | 			enums = append(enums, def) | ||||||
|  |  | ||||||
|  | 			if debugOutput { | ||||||
| 				fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type) | 				fmt.Printf("Found enum definition { '%s' -> '%s' }\n", def.EnumTypeName, def.Type) | ||||||
| 			} | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		if match, ok := rexValueDef.MatchFirst(line); ok { | 		if match, ok := rexEnumValueDef.MatchFirst(line); ok { | ||||||
| 			typename := match.GroupByName("type").Value() | 			typename := match.GroupByName("type").Value() | ||||||
|  |  | ||||||
|  | 			comment := match.GroupByNameOrEmpty("comm").ValueOrNil() | ||||||
|  | 			var descr *string = nil | ||||||
|  | 			var data *map[string]any = nil | ||||||
|  | 			if comment != nil { | ||||||
|  | 				comment = langext.Ptr(strings.TrimSpace(*comment)) | ||||||
|  | 				if strings.HasPrefix(*comment, "{") { | ||||||
|  | 					if v, ok := tryParseDataComment(*comment); ok { | ||||||
|  | 						data = &v | ||||||
|  | 						if anyDataDescr, ok := v["description"]; ok { | ||||||
|  | 							if dataDescr, ok := anyDataDescr.(string); ok { | ||||||
|  | 								descr = &dataDescr | ||||||
|  | 							} | ||||||
|  | 						} | ||||||
|  | 					} else { | ||||||
|  | 						descr = comment | ||||||
|  | 					} | ||||||
|  | 				} else { | ||||||
|  | 					descr = comment | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  |  | ||||||
| 			def := EnumDefVal{ | 			def := EnumDefVal{ | ||||||
| 				VarName:     match.GroupByName("name").Value(), | 				VarName:     match.GroupByName("name").Value(), | ||||||
| 				Value:       match.GroupByName("value").Value(), | 				Value:       match.GroupByName("value").Value(), | ||||||
| 				Description: match.GroupByNameOrEmpty("descr").ValueOrNil(), | 				RawComment:  comment, | ||||||
|  | 				Description: descr, | ||||||
|  | 				Data:        data, | ||||||
| 			} | 			} | ||||||
|  |  | ||||||
| 			found := false | 			found := false | ||||||
| @@ -185,151 +243,131 @@ func processFile(basedir string, fn string) ([]EnumDef, string, error) { | |||||||
| 				if v.EnumTypeName == typename { | 				if v.EnumTypeName == typename { | ||||||
| 					enums[i].Values = append(enums[i].Values, def) | 					enums[i].Values = append(enums[i].Values, def) | ||||||
| 					found = true | 					found = true | ||||||
|  |  | ||||||
|  | 					if debugOutput { | ||||||
| 						if def.Description != nil { | 						if def.Description != nil { | ||||||
| 							fmt.Printf("Found enum value [%s] for '%s'  ('%s')\n", def.Value, def.VarName, *def.Description) | 							fmt.Printf("Found enum value [%s] for '%s'  ('%s')\n", def.Value, def.VarName, *def.Description) | ||||||
| 						} else { | 						} else { | ||||||
| 							fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName) | 							fmt.Printf("Found enum value [%s] for '%s'\n", def.Value, def.VarName) | ||||||
| 						} | 						} | ||||||
|  | 					} | ||||||
| 					break | 					break | ||||||
| 				} | 				} | ||||||
| 			} | 			} | ||||||
| 			if !found { | 			if !found { | ||||||
|  | 				if debugOutput { | ||||||
| 					fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName) | 					fmt.Printf("Found non-enum value [%s] for '%s' ( looks like enum value, but no matching @enum:type )\n", def.Value, def.VarName) | ||||||
| 				} | 				} | ||||||
| 			} | 			} | ||||||
| 		} | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	return enums, pkgname, nil | 	return enums, pkgname, nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func fmtOutput(cs string, enums []EnumDef, pkgname string) string { | func tryParseDataComment(s string) (map[string]any, bool) { | ||||||
| 	str := "// Code generated by enum-generate.go DO NOT EDIT.\n" |  | ||||||
| 	str += "\n" |  | ||||||
| 	str += "package " + pkgname + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	str += "import \"gogs.mikescher.com/BlackForestBytes/goext/langext\"" + "\n" | 	r := make(map[string]any) | ||||||
| 	str += "import \"gogs.mikescher.com/BlackForestBytes/goext/enums\"" + "\n" |  | ||||||
| 	str += "\n" |  | ||||||
|  |  | ||||||
| 	str += "const ChecksumGenerator = \"" + cs + "\" // GoExtVersion: " + goext.GoextVersion + "\n" | 	err := json.Unmarshal([]byte(s), &r) | ||||||
| 	str += "\n" | 	if err != nil { | ||||||
|  | 		return nil, false | ||||||
| 	for _, enumdef := range enums { |  | ||||||
|  |  | ||||||
| 		hasDescr := langext.ArrAll(enumdef.Values, func(val EnumDefVal) bool { return val.Description != nil }) |  | ||||||
| 		hasStr := enumdef.Type == "string" |  | ||||||
|  |  | ||||||
| 		str += "// ================================ " + enumdef.EnumTypeName + " ================================" + "\n" |  | ||||||
| 		str += "//" + "\n" |  | ||||||
| 		str += "// File:       " + enumdef.FileRelative + "\n" |  | ||||||
| 		str += "// StringEnum: " + langext.Conditional(hasStr, "true", "false") + "\n" |  | ||||||
| 		str += "// DescrEnum:  " + langext.Conditional(hasDescr, "true", "false") + "\n" |  | ||||||
| 		str += "//" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "var __" + enumdef.EnumTypeName + "Values = []" + enumdef.EnumTypeName + "{" + "\n" |  | ||||||
| 		for _, v := range enumdef.Values { |  | ||||||
| 			str += "    " + v.VarName + "," + "\n" |  | ||||||
| 		} |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		if hasDescr { |  | ||||||
| 			str += "var __" + enumdef.EnumTypeName + "Descriptions = map[" + enumdef.EnumTypeName + "]string{" + "\n" |  | ||||||
| 			for _, v := range enumdef.Values { |  | ||||||
| 				str += "    " + v.VarName + ": \"" + strings.TrimSpace(*v.Description) + "\"," + "\n" |  | ||||||
| 			} |  | ||||||
| 			str += "}" + "\n" |  | ||||||
| 			str += "" + "\n" |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 		str += "var __" + enumdef.EnumTypeName + "Varnames = map[" + enumdef.EnumTypeName + "]string{" + "\n" | 	for _, v := range r { | ||||||
| 		for _, v := range enumdef.Values { |  | ||||||
| 			str += "    " + v.VarName + ": \"" + v.VarName + "\"," + "\n" | 		rv := reflect.ValueOf(v) | ||||||
|  |  | ||||||
|  | 		if rv.Kind() == reflect.Ptr && rv.IsNil() { | ||||||
|  | 			continue | ||||||
| 		} | 		} | ||||||
| 		str += "}" + "\n" | 		if rv.Kind() == reflect.Bool { | ||||||
| 		str += "" + "\n" | 			continue | ||||||
|  | 		} | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") Valid() bool {" + "\n" | 		if rv.Kind() == reflect.String { | ||||||
| 		str += "    return langext.InArray(e, __" + enumdef.EnumTypeName + "Values)" + "\n" | 			continue | ||||||
| 		str += "}" + "\n" | 		} | ||||||
| 		str += "" + "\n" | 		if rv.Kind() == reflect.Int64 { | ||||||
|  | 			continue | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") Values() []" + enumdef.EnumTypeName + " {" + "\n" | 		} | ||||||
| 		str += "    return __" + enumdef.EnumTypeName + "Values" + "\n" | 		if rv.Kind() == reflect.Float64 { | ||||||
| 		str += "}" + "\n" | 			continue | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") ValuesAny() []any {" + "\n" |  | ||||||
| 		str += "    return langext.ArrCastToAny(__" + enumdef.EnumTypeName + "Values)" + "\n" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") ValuesMeta() []enums.EnumMetaValue {" + "\n" |  | ||||||
| 		str += "    return " + enumdef.EnumTypeName + "ValuesMeta()" |  | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" |  | ||||||
|  |  | ||||||
| 		if hasStr { |  | ||||||
| 			str += "func (e " + enumdef.EnumTypeName + ") String() string {" + "\n" |  | ||||||
| 			str += "    return string(e)" + "\n" |  | ||||||
| 			str += "}" + "\n" |  | ||||||
| 			str += "" + "\n" |  | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		if hasDescr { | 		return nil, false | ||||||
| 			str += "func (e " + enumdef.EnumTypeName + ") Description() string {" + "\n" |  | ||||||
| 			str += "    if d, ok := __" + enumdef.EnumTypeName + "Descriptions[e]; ok {" + "\n" |  | ||||||
| 			str += "        return d" + "\n" |  | ||||||
| 			str += "    }" + "\n" |  | ||||||
| 			str += "    return \"\"" + "\n" |  | ||||||
| 			str += "}" + "\n" |  | ||||||
| 			str += "" + "\n" |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") VarName() string {" + "\n" | 	return r, true | ||||||
| 		str += "    if d, ok := __" + enumdef.EnumTypeName + "Varnames[e]; ok {" + "\n" | } | ||||||
| 		str += "        return d" + "\n" |  | ||||||
| 		str += "    }" + "\n" | func fmtEnumOutput(cs string, enums []EnumDef, pkgname string) string { | ||||||
| 		str += "    return \"\"" + "\n" |  | ||||||
| 		str += "}" + "\n" | 	templ := template.New("enum-generate") | ||||||
| 		str += "" + "\n" |  | ||||||
|  | 	templ = templ.Funcs(template.FuncMap{ | ||||||
| 		str += "func (e " + enumdef.EnumTypeName + ") Meta() enums.EnumMetaValue {" + "\n" | 		"boolToStr": func(b bool) string { return langext.Conditional(b, "true", "false") }, | ||||||
| 		if hasDescr { | 		"deref":     func(v *string) string { return *v }, | ||||||
| 			str += "    return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())}" | 		"trimSpace": func(str string) string { return strings.TrimSpace(str) }, | ||||||
| 		} else { | 		"hasStr":    func(v EnumDef) bool { return v.Type == "string" }, | ||||||
| 			str += "    return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil}" | 		"hasDescr": func(v EnumDef) bool { | ||||||
| 		} | 			return langext.ArrAll(v.Values, func(val EnumDefVal) bool { return val.Description != nil }) | ||||||
| 		str += "}" + "\n" | 		}, | ||||||
| 		str += "" + "\n" | 		"hasData": func(v EnumDef) bool { | ||||||
|  | 			return len(v.Values) > 0 && langext.ArrAll(v.Values, func(val EnumDefVal) bool { return val.Data != nil }) | ||||||
| 		str += "func Parse" + enumdef.EnumTypeName + "(vv string) (" + enumdef.EnumTypeName + ", bool) {" + "\n" | 		}, | ||||||
| 		str += "    for _, ev := range __" + enumdef.EnumTypeName + "Values {" + "\n" | 		"gostr": func(v any) string { | ||||||
| 		str += "        if string(ev) == vv {" + "\n" | 			return fmt.Sprintf("%#+v", v) | ||||||
| 		str += "            return ev, true" + "\n" | 		}, | ||||||
| 		str += "        }" + "\n" | 		"goobj": func(name string, v any) string { | ||||||
| 		str += "    }" + "\n" | 			return fmt.Sprintf("%#+v", v) | ||||||
| 		str += "    return \"\", false" + "\n" | 		}, | ||||||
| 		str += "}" + "\n" | 		"godatakey": func(v string) string { | ||||||
| 		str += "" + "\n" | 			return strings.ToUpper(v[0:1]) + v[1:] | ||||||
|  | 		}, | ||||||
| 		str += "func " + enumdef.EnumTypeName + "Values() []" + enumdef.EnumTypeName + " {" + "\n" | 		"godatavalue": func(v any) string { | ||||||
| 		str += "    return __" + enumdef.EnumTypeName + "Values" + "\n" | 			return fmt.Sprintf("%#+v", v) | ||||||
| 		str += "}" + "\n" | 		}, | ||||||
| 		str += "" + "\n" | 		"godatatype": func(v any) string { | ||||||
|  | 			return fmt.Sprintf("%T", v) | ||||||
| 		str += "func " + enumdef.EnumTypeName + "ValuesMeta() []enums.EnumMetaValue {" + "\n" | 		}, | ||||||
| 		str += "    return []enums.EnumMetaValue{" + "\n" | 		"mapindex": func(v map[string]any, k string) any { | ||||||
| 		for _, v := range enumdef.Values { | 			return v[k] | ||||||
| 			str += "        " + v.VarName + ".Meta(),\n" | 		}, | ||||||
| 		} | 		"generalDataKeys": func(v EnumDef) map[string]string { | ||||||
| 		str += "    }" + "\n" | 			r0 := make(map[string]int) | ||||||
| 		str += "}" + "\n" |  | ||||||
| 		str += "" + "\n" | 			for _, eval := range v.Values { | ||||||
|  | 				for k := range *eval.Data { | ||||||
| 	} | 					if ctr, ok := r0[k]; ok { | ||||||
|  | 						r0[k] = ctr + 1 | ||||||
| 	return str | 					} else { | ||||||
|  | 						r0[k] = 1 | ||||||
|  | 					} | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			r1 := langext.MapToArr(r0) | ||||||
|  | 			r2 := langext.ArrFilter(r1, func(p langext.MapEntry[string, int]) bool { return p.Value == len(v.Values) }) | ||||||
|  | 			r3 := langext.ArrMap(r2, func(p langext.MapEntry[string, int]) string { return p.Key }) | ||||||
|  | 			r4 := langext.ArrToKVMap(r3, func(p string) string { return p }, func(p string) string { return fmt.Sprintf("%T", (*v.Values[0].Data)[p]) }) | ||||||
|  |  | ||||||
|  | 			return r4 | ||||||
|  | 		}, | ||||||
|  | 	}) | ||||||
|  |  | ||||||
|  | 	templ = template.Must(templ.Parse(templateEnumGenerateText)) | ||||||
|  |  | ||||||
|  | 	buffer := bytes.Buffer{} | ||||||
|  |  | ||||||
|  | 	err := templ.Execute(&buffer, langext.H{ | ||||||
|  | 		"PkgName":      pkgname, | ||||||
|  | 		"Checksum":     cs, | ||||||
|  | 		"GoextVersion": goext.GoextVersion, | ||||||
|  | 		"Enums":        enums, | ||||||
|  | 	}) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return buffer.String() | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										155
									
								
								bfcodegen/enum-generate.template
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										155
									
								
								bfcodegen/enum-generate.template
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,155 @@ | |||||||
|  | // Code generated by enum-generate.go DO NOT EDIT. | ||||||
|  |  | ||||||
|  | package {{.PkgName}} | ||||||
|  |  | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/enums" | ||||||
|  |  | ||||||
|  | const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | ||||||
|  |  | ||||||
|  | {{ $pkgname  := .PkgName }} | ||||||
|  |  | ||||||
|  | {{range .Enums}} | ||||||
|  |  | ||||||
|  | {{ $hasStr   := ( . | hasStr   ) }} | ||||||
|  | {{ $hasDescr := ( . | hasDescr ) }} | ||||||
|  | {{ $hasData  := ( . | hasData  ) }} | ||||||
|  |  | ||||||
|  | // ================================ {{.EnumTypeName}} ================================ | ||||||
|  | // | ||||||
|  | // File:       {{.FileRelative}} | ||||||
|  | // StringEnum: {{$hasStr   | boolToStr}} | ||||||
|  | // DescrEnum:  {{$hasDescr | boolToStr}} | ||||||
|  | // DataEnum:   {{$hasData  | boolToStr}} | ||||||
|  | // | ||||||
|  |  | ||||||
|  | {{ $typename := .EnumTypeName }} | ||||||
|  | {{ $enumdef  := . }} | ||||||
|  |  | ||||||
|  | var __{{.EnumTypeName}}Values = []{{.EnumTypeName}}{ {{range .Values}} | ||||||
|  | 	{{.VarName}}, {{end}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{if $hasDescr}} | ||||||
|  | var __{{.EnumTypeName}}Descriptions = map[{{.EnumTypeName}}]string{ {{range .Values}} | ||||||
|  | 	{{.VarName}}: {{.Description | deref | trimSpace | gostr}}, {{end}} | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | {{if $hasData}} | ||||||
|  | type {{ .EnumTypeName }}Data struct { {{ range $datakey, $datatype := ($enumdef | generalDataKeys)  }} | ||||||
|  |     {{ $datakey | godatakey }} {{ $datatype }} `json:"{{ $datakey }}"` {{ end }} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | var __{{.EnumTypeName}}Data = map[{{.EnumTypeName}}]{{.EnumTypeName}}Data{ {{range .Values}} {{ $enumvalue := . }} | ||||||
|  | 	{{.VarName}}: {{ $typename }}Data{ {{ range $datakey, $datatype := $enumdef | generalDataKeys  }} | ||||||
|  | 	    {{ $datakey | godatakey }}: {{ (mapindex $enumvalue.Data $datakey) | godatavalue }}, {{ end }} | ||||||
|  | 	}, {{end}} | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | var __{{.EnumTypeName}}Varnames = map[{{.EnumTypeName}}]string{ {{range .Values}} | ||||||
|  | 	{{.VarName}}: "{{.VarName}}", {{end}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) Valid() bool { | ||||||
|  | 	return langext.InArray(e, __{{.EnumTypeName}}Values) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) Values() []{{.EnumTypeName}} { | ||||||
|  | 	return __{{.EnumTypeName}}Values | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) ValuesAny() []any { | ||||||
|  | 	return langext.ArrCastToAny(__{{.EnumTypeName}}Values) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) ValuesMeta() []enums.EnumMetaValue { | ||||||
|  | 	return {{.EnumTypeName}}ValuesMeta() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{if $hasStr}} | ||||||
|  | func (e {{.EnumTypeName}}) String() string { | ||||||
|  | 	return string(e) | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | {{if $hasDescr}} | ||||||
|  | func (e {{.EnumTypeName}}) Description() string { | ||||||
|  | 	if d, ok := __{{.EnumTypeName}}Descriptions[e]; ok { | ||||||
|  | 		return d | ||||||
|  | 	} | ||||||
|  | 	return "" | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | {{if $hasData}} | ||||||
|  | func (e {{.EnumTypeName}}) Data() {{.EnumTypeName}}Data { | ||||||
|  | 	if d, ok := __{{.EnumTypeName}}Data[e]; ok { | ||||||
|  | 		return d | ||||||
|  | 	} | ||||||
|  | 	return {{.EnumTypeName}}Data{} | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) VarName() string { | ||||||
|  | 	if d, ok := __{{.EnumTypeName}}Varnames[e]; ok { | ||||||
|  | 		return d | ||||||
|  | 	} | ||||||
|  | 	return "" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) TypeName() string { | ||||||
|  | 	return "{{$typename}}" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) PackageName() string { | ||||||
|  | 	return "{{$pkgname }}" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue { | ||||||
|  |     {{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{if $hasDescr}} | ||||||
|  | func (e {{.EnumTypeName}}) DescriptionMeta() enums.EnumDescriptionMetaValue { | ||||||
|  |     return enums.EnumDescriptionMetaValue{VarName: e.VarName(), Value: e, Description: e.Description()} | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | func Parse{{.EnumTypeName}}(vv string) ({{.EnumTypeName}}, bool) { | ||||||
|  | 	for _, ev := range __{{.EnumTypeName}}Values { | ||||||
|  | 		if string(ev) == vv { | ||||||
|  | 			return ev, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return "", false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func {{.EnumTypeName}}Values() []{{.EnumTypeName}} { | ||||||
|  | 	return __{{.EnumTypeName}}Values | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func {{.EnumTypeName}}ValuesMeta() []enums.EnumMetaValue { | ||||||
|  | 	return []enums.EnumMetaValue{ {{range .Values}} | ||||||
|  |             {{.VarName}}.Meta(), {{end}} | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{if $hasDescr}} | ||||||
|  | func {{.EnumTypeName}}ValuesDescriptionMeta() []enums.EnumDescriptionMetaValue { | ||||||
|  | 	return []enums.EnumDescriptionMetaValue{ {{range .Values}} | ||||||
|  |             {{.VarName}}.DescriptionMeta(), {{end}} | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | // ================================ ================= ================================ | ||||||
|  |  | ||||||
|  | func AllPackageEnums() []enums.Enum { | ||||||
|  |     return []enums.Enum{ {{range .Enums}} | ||||||
|  |         {{ if gt (len .Values) 0 }} {{  $v := index .Values 0 }} {{ $v.VarName}}, {{end}} // {{ .EnumTypeName }} {{end}} | ||||||
|  |     } | ||||||
|  | } | ||||||
| @@ -1,15 +1,91 @@ | |||||||
| package bfcodegen | package bfcodegen | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	_ "embed" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"os" | ||||||
|  | 	"path/filepath" | ||||||
| 	"testing" | 	"testing" | ||||||
|  | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func TestApplyEnvOverridesSimple(t *testing.T) { | //go:embed _test_example_1.tgz | ||||||
|  | var EnumExampleModels1 []byte | ||||||
|  |  | ||||||
| 	err := GenerateEnumSpecs("/home/mike/Code/reiff/badennet/bnet-backend/models", "/home/mike/Code/reiff/badennet/bnet-backend/models/enums_gen.go") | //go:embed _test_example_2.tgz | ||||||
| 	if err != nil { | var EnumExampleModels2 []byte | ||||||
| 		t.Error(err) |  | ||||||
| 		t.Fail() |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
|  | func TestGenerateEnumSpecs(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz") | ||||||
|  |  | ||||||
|  | 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | ||||||
|  |  | ||||||
|  | 	err := os.WriteFile(tmpFile, EnumExampleModels1, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	err = os.Mkdir(tmpDir, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	s1, cs1, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true, true) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	s2, cs2, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true, true) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, cs1, cs2) | ||||||
|  | 	tst.AssertEqual(t, s1, s2) | ||||||
|  |  | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println(s1) | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestGenerateEnumSpecsData(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz") | ||||||
|  |  | ||||||
|  | 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | ||||||
|  |  | ||||||
|  | 	err := os.WriteFile(tmpFile, EnumExampleModels2, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	err = os.Mkdir(tmpDir, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	s1, _, _, err := _generateEnumSpecs(tmpDir, "", "", true, true) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println(s1) | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										198
									
								
								bfcodegen/id-generate.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										198
									
								
								bfcodegen/id-generate.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,198 @@ | |||||||
|  | package bfcodegen | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	_ "embed" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go/format" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/cryptext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
|  | 	"io" | ||||||
|  | 	"os" | ||||||
|  | 	"path" | ||||||
|  | 	"path/filepath" | ||||||
|  | 	"regexp" | ||||||
|  | 	"strings" | ||||||
|  | 	"text/template" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type IDDef struct { | ||||||
|  | 	File         string | ||||||
|  | 	FileRelative string | ||||||
|  | 	Name         string | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type IDGenOptions struct { | ||||||
|  | 	DebugOutput *bool | ||||||
|  | } | ||||||
|  |  | ||||||
|  | var rexIDPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]+)\s*$`)) | ||||||
|  |  | ||||||
|  | var rexIDDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+string\s*//\s*(@id:type).*$`)) | ||||||
|  |  | ||||||
|  | var rexIDChecksumConst = rext.W(regexp.MustCompile(`const ChecksumIDGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | ||||||
|  |  | ||||||
|  | //go:embed id-generate.template | ||||||
|  | var templateIDGenerateText string | ||||||
|  |  | ||||||
|  | func GenerateIDSpecs(sourceDir string, destFile string, opt IDGenOptions) error { | ||||||
|  |  | ||||||
|  | 	debugOutput := langext.Coalesce(opt.DebugOutput, false) | ||||||
|  |  | ||||||
|  | 	files, err := os.ReadDir(sourceDir) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	oldChecksum := "N/A" | ||||||
|  | 	if _, err := os.Stat(destFile); !os.IsNotExist(err) { | ||||||
|  | 		content, err := os.ReadFile(destFile) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return err | ||||||
|  | 		} | ||||||
|  | 		if m, ok := rexIDChecksumConst.MatchFirst(string(content)); ok { | ||||||
|  | 			oldChecksum = m.GroupByName("cs").Value() | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) }) | ||||||
|  | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") }) | ||||||
|  | 	files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") }) | ||||||
|  | 	langext.SortBy(files, func(v os.DirEntry) string { return v.Name() }) | ||||||
|  |  | ||||||
|  | 	newChecksumStr := goext.GoextVersion | ||||||
|  | 	for _, f := range files { | ||||||
|  | 		content, err := os.ReadFile(path.Join(sourceDir, f.Name())) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return err | ||||||
|  | 		} | ||||||
|  | 		newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	newChecksum := cryptext.BytesSha256([]byte(newChecksumStr)) | ||||||
|  |  | ||||||
|  | 	if newChecksum != oldChecksum { | ||||||
|  | 		fmt.Printf("[IDGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum) | ||||||
|  | 	} else { | ||||||
|  | 		fmt.Printf("[IDGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	allIDs := make([]IDDef, 0) | ||||||
|  |  | ||||||
|  | 	pkgname := "" | ||||||
|  |  | ||||||
|  | 	for _, f := range files { | ||||||
|  | 		if debugOutput { | ||||||
|  | 			fmt.Printf("========= %s =========\n\n", f.Name()) | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		fileIDs, pn, err := processIDFile(sourceDir, path.Join(sourceDir, f.Name()), debugOutput) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return err | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if debugOutput { | ||||||
|  | 			fmt.Printf("\n") | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		allIDs = append(allIDs, fileIDs...) | ||||||
|  |  | ||||||
|  | 		if pn != "" { | ||||||
|  | 			pkgname = pn | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if pkgname == "" { | ||||||
|  | 		return errors.New("no package name found in any file") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	fdata, err := format.Source([]byte(fmtIDOutput(newChecksum, allIDs, pkgname))) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = os.WriteFile(destFile, fdata, 0o755) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func processIDFile(basedir string, fn string, debugOutput bool) ([]IDDef, string, error) { | ||||||
|  | 	file, err := os.Open(fn) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	defer func() { _ = file.Close() }() | ||||||
|  |  | ||||||
|  | 	bin, err := io.ReadAll(file) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	lines := strings.Split(string(bin), "\n") | ||||||
|  |  | ||||||
|  | 	ids := make([]IDDef, 0) | ||||||
|  |  | ||||||
|  | 	pkgname := "" | ||||||
|  |  | ||||||
|  | 	for i, line := range lines { | ||||||
|  | 		if i == 0 && strings.HasPrefix(line, "// Code generated by") { | ||||||
|  | 			break | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if match, ok := rexIDPackage.MatchFirst(line); i == 0 && ok { | ||||||
|  | 			pkgname = match.GroupByName("name").Value() | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if match, ok := rexIDDef.MatchFirst(line); ok { | ||||||
|  |  | ||||||
|  | 			rfp, err := filepath.Rel(basedir, fn) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return nil, "", err | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			def := IDDef{ | ||||||
|  | 				File:         fn, | ||||||
|  | 				FileRelative: rfp, | ||||||
|  | 				Name:         match.GroupByName("name").Value(), | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			if debugOutput { | ||||||
|  | 				fmt.Printf("Found ID definition { '%s' }\n", def.Name) | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			ids = append(ids, def) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return ids, pkgname, nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func fmtIDOutput(cs string, ids []IDDef, pkgname string) string { | ||||||
|  | 	templ := template.Must(template.New("id-generate").Parse(templateIDGenerateText)) | ||||||
|  |  | ||||||
|  | 	buffer := bytes.Buffer{} | ||||||
|  |  | ||||||
|  | 	anyDef := langext.ArrFirstOrNil(ids, func(def IDDef) bool { return def.Name == "AnyID" || def.Name == "AnyId" }) | ||||||
|  |  | ||||||
|  | 	err := templ.Execute(&buffer, langext.H{ | ||||||
|  | 		"PkgName":      pkgname, | ||||||
|  | 		"Checksum":     cs, | ||||||
|  | 		"GoextVersion": goext.GoextVersion, | ||||||
|  | 		"IDs":          ids, | ||||||
|  | 		"AnyDef":       anyDef, | ||||||
|  | 	}) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return buffer.String() | ||||||
|  | } | ||||||
							
								
								
									
										47
									
								
								bfcodegen/id-generate.template
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										47
									
								
								bfcodegen/id-generate.template
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,47 @@ | |||||||
|  | // Code generated by id-generate.go DO NOT EDIT. | ||||||
|  |  | ||||||
|  | package {{.PkgName}} | ||||||
|  |  | ||||||
|  | import "go.mongodb.org/mongo-driver/bson" | ||||||
|  | import "go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | import "go.mongodb.org/mongo-driver/bson/primitive" | ||||||
|  | import "gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  |  | ||||||
|  | const ChecksumIDGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}} | ||||||
|  |  | ||||||
|  | {{range .IDs}} | ||||||
|  |  | ||||||
|  | // ================================ {{.Name}} ({{.FileRelative}}) ================================ | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	if objId, err := primitive.ObjectIDFromHex(string(i)); err == nil { | ||||||
|  | 		return bson.MarshalValue(objId) | ||||||
|  | 	} else { | ||||||
|  | 		return 0, nil, exerr.New(exerr.TypeMarshalEntityID, "Failed to marshal {{.Name}}("+i.String()+") to ObjectId").Str("value", string(i)).Type("type", i).Build() | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) String() string { | ||||||
|  | 	return string(i) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) ObjID() (primitive.ObjectID, error) { | ||||||
|  | 	return primitive.ObjectIDFromHex(string(i)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (i {{.Name}}) Valid() bool { | ||||||
|  | 	_, err := primitive.ObjectIDFromHex(string(i)) | ||||||
|  | 	return err == nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{if ne $.AnyDef nil}} | ||||||
|  | func (i {{.Name}}) AsAny() {{$.AnyDef.Name}} { | ||||||
|  | 	return {{$.AnyDef.Name}}(i) | ||||||
|  | } | ||||||
|  | {{end}} | ||||||
|  |  | ||||||
|  | func New{{.Name}}() {{.Name}} { | ||||||
|  | 	return {{.Name}}(primitive.NewObjectID().Hex()) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | {{end}} | ||||||
							
								
								
									
										52
									
								
								bfcodegen/id-generate_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										52
									
								
								bfcodegen/id-generate_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,52 @@ | |||||||
|  | package bfcodegen | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	_ "embed" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/cmdext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"os" | ||||||
|  | 	"path/filepath" | ||||||
|  | 	"testing" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | //go:embed _test_example_1.tgz | ||||||
|  | var IDExampleModels1 []byte | ||||||
|  |  | ||||||
|  | func TestGenerateIDSpecs(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz") | ||||||
|  |  | ||||||
|  | 	tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID()) | ||||||
|  |  | ||||||
|  | 	err := os.WriteFile(tmpFile, IDExampleModels1, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.Remove(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	err = os.Mkdir(tmpDir, 0o777) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t.Cleanup(func() { _ = os.RemoveAll(tmpFile) }) | ||||||
|  |  | ||||||
|  | 	_, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run() | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go", IDGenOptions{DebugOutput: langext.PTrue}) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	err = GenerateIDSpecs(tmpDir, tmpDir+"/id_gen.go", IDGenOptions{DebugOutput: langext.PTrue}) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/id_gen.go"))(t))) | ||||||
|  | 	fmt.Println("=====================================================================================================") | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | 	fmt.Println() | ||||||
|  | } | ||||||
| @@ -133,9 +133,6 @@ func run(opt CommandRunner) (CommandResult, error) { | |||||||
|  |  | ||||||
| 	case <-stderrFailChan: | 	case <-stderrFailChan: | ||||||
| 		_ = cmd.Process.Kill() | 		_ = cmd.Process.Kill() | ||||||
| 		for _, lstr := range opt.listener { |  | ||||||
| 			lstr.Timeout() |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		if fallback, ok := syncext.ReadChannelWithTimeout(outputChan, 32*time.Millisecond); ok { | 		if fallback, ok := syncext.ReadChannelWithTimeout(outputChan, 32*time.Millisecond); ok { | ||||||
| 			// most of the time the cmd.Process.Kill() should also have finished the pipereader | 			// most of the time the cmd.Process.Kill() should also have finished the pipereader | ||||||
| @@ -160,7 +157,8 @@ func run(opt CommandRunner) (CommandResult, error) { | |||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 	case outobj := <-outputChan: | 	case outobj := <-outputChan: | ||||||
| 		if exiterr, ok := outobj.err.(*exec.ExitError); ok { | 		var exiterr *exec.ExitError | ||||||
|  | 		if errors.As(outobj.err, &exiterr) { | ||||||
| 			excode := exiterr.ExitCode() | 			excode := exiterr.ExitCode() | ||||||
| 			for _, lstr := range opt.listener { | 			for _, lstr := range opt.listener { | ||||||
| 				lstr.Finished(excode) | 				lstr.Finished(excode) | ||||||
|   | |||||||
| @@ -33,7 +33,7 @@ func TestStdout(t *testing.T) { | |||||||
|  |  | ||||||
| func TestStderr(t *testing.T) { | func TestStderr(t *testing.T) { | ||||||
|  |  | ||||||
| 	res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").Run() | 	res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").Run() | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		t.Errorf("%v", err) | 		t.Errorf("%v", err) | ||||||
| 	} | 	} | ||||||
| @@ -56,7 +56,7 @@ func TestStderr(t *testing.T) { | |||||||
| } | } | ||||||
|  |  | ||||||
| func TestStdcombined(t *testing.T) { | func TestStdcombined(t *testing.T) { | ||||||
| 	res1, err := Runner("python"). | 	res1, err := Runner("python3"). | ||||||
| 		Arg("-c"). | 		Arg("-c"). | ||||||
| 		Arg("import sys; import time; print(\"1\", file=sys.stderr, flush=True); time.sleep(0.1); print(\"2\", file=sys.stdout, flush=True); time.sleep(0.1); print(\"3\", file=sys.stderr, flush=True)"). | 		Arg("import sys; import time; print(\"1\", file=sys.stderr, flush=True); time.sleep(0.1); print(\"2\", file=sys.stdout, flush=True); time.sleep(0.1); print(\"3\", file=sys.stderr, flush=True)"). | ||||||
| 		Run() | 		Run() | ||||||
| @@ -82,7 +82,7 @@ func TestStdcombined(t *testing.T) { | |||||||
| } | } | ||||||
|  |  | ||||||
| func TestPartialRead(t *testing.T) { | func TestPartialRead(t *testing.T) { | ||||||
| 	res1, err := Runner("python"). | 	res1, err := Runner("python3"). | ||||||
| 		Arg("-c"). | 		Arg("-c"). | ||||||
| 		Arg("import sys; import time; print(\"first message\", flush=True); time.sleep(5); print(\"cant see me\", flush=True);"). | 		Arg("import sys; import time; print(\"first message\", flush=True); time.sleep(5); print(\"cant see me\", flush=True);"). | ||||||
| 		Timeout(100 * time.Millisecond). | 		Timeout(100 * time.Millisecond). | ||||||
| @@ -106,7 +106,7 @@ func TestPartialRead(t *testing.T) { | |||||||
| } | } | ||||||
|  |  | ||||||
| func TestPartialReadStderr(t *testing.T) { | func TestPartialReadStderr(t *testing.T) { | ||||||
| 	res1, err := Runner("python"). | 	res1, err := Runner("python3"). | ||||||
| 		Arg("-c"). | 		Arg("-c"). | ||||||
| 		Arg("import sys; import time; print(\"first message\", file=sys.stderr, flush=True); time.sleep(5); print(\"cant see me\", file=sys.stderr, flush=True);"). | 		Arg("import sys; import time; print(\"first message\", file=sys.stderr, flush=True); time.sleep(5); print(\"cant see me\", file=sys.stderr, flush=True);"). | ||||||
| 		Timeout(100 * time.Millisecond). | 		Timeout(100 * time.Millisecond). | ||||||
| @@ -131,7 +131,7 @@ func TestPartialReadStderr(t *testing.T) { | |||||||
|  |  | ||||||
| func TestReadUnflushedStdout(t *testing.T) { | func TestReadUnflushedStdout(t *testing.T) { | ||||||
|  |  | ||||||
| 	res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stdout, end='')").Run() | 	res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stdout, end='')").Run() | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		t.Errorf("%v", err) | 		t.Errorf("%v", err) | ||||||
| 	} | 	} | ||||||
| @@ -155,7 +155,7 @@ func TestReadUnflushedStdout(t *testing.T) { | |||||||
|  |  | ||||||
| func TestReadUnflushedStderr(t *testing.T) { | func TestReadUnflushedStderr(t *testing.T) { | ||||||
|  |  | ||||||
| 	res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stderr, end='')").Run() | 	res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"message101\", file=sys.stderr, end='')").Run() | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		t.Errorf("%v", err) | 		t.Errorf("%v", err) | ||||||
| 	} | 	} | ||||||
| @@ -180,7 +180,7 @@ func TestReadUnflushedStderr(t *testing.T) { | |||||||
| func TestPartialReadUnflushed(t *testing.T) { | func TestPartialReadUnflushed(t *testing.T) { | ||||||
| 	t.SkipNow() | 	t.SkipNow() | ||||||
|  |  | ||||||
| 	res1, err := Runner("python"). | 	res1, err := Runner("python3"). | ||||||
| 		Arg("-c"). | 		Arg("-c"). | ||||||
| 		Arg("import sys; import time; print(\"first message\", end=''); time.sleep(5); print(\"cant see me\", end='');"). | 		Arg("import sys; import time; print(\"first message\", end=''); time.sleep(5); print(\"cant see me\", end='');"). | ||||||
| 		Timeout(100 * time.Millisecond). | 		Timeout(100 * time.Millisecond). | ||||||
| @@ -206,7 +206,7 @@ func TestPartialReadUnflushed(t *testing.T) { | |||||||
| func TestPartialReadUnflushedStderr(t *testing.T) { | func TestPartialReadUnflushedStderr(t *testing.T) { | ||||||
| 	t.SkipNow() | 	t.SkipNow() | ||||||
|  |  | ||||||
| 	res1, err := Runner("python"). | 	res1, err := Runner("python3"). | ||||||
| 		Arg("-c"). | 		Arg("-c"). | ||||||
| 		Arg("import sys; import time; print(\"first message\", file=sys.stderr, end=''); time.sleep(5); print(\"cant see me\", file=sys.stderr, end='');"). | 		Arg("import sys; import time; print(\"first message\", file=sys.stderr, end=''); time.sleep(5); print(\"cant see me\", file=sys.stderr, end='');"). | ||||||
| 		Timeout(100 * time.Millisecond). | 		Timeout(100 * time.Millisecond). | ||||||
| @@ -231,7 +231,7 @@ func TestPartialReadUnflushedStderr(t *testing.T) { | |||||||
|  |  | ||||||
| func TestListener(t *testing.T) { | func TestListener(t *testing.T) { | ||||||
|  |  | ||||||
| 	res1, err := Runner("python"). | 	res1, err := Runner("python3"). | ||||||
| 		Arg("-c"). | 		Arg("-c"). | ||||||
| 		Arg("import sys;" + | 		Arg("import sys;" + | ||||||
| 			"import time;" + | 			"import time;" + | ||||||
| @@ -264,7 +264,7 @@ func TestListener(t *testing.T) { | |||||||
|  |  | ||||||
| func TestLongStdout(t *testing.T) { | func TestLongStdout(t *testing.T) { | ||||||
|  |  | ||||||
| 	res1, err := Runner("python"). | 	res1, err := Runner("python3"). | ||||||
| 		Arg("-c"). | 		Arg("-c"). | ||||||
| 		Arg("import sys; import time; print(\"X\" * 125001 + \"\\n\"); print(\"Y\" * 125001 + \"\\n\"); print(\"Z\" * 125001 + \"\\n\");"). | 		Arg("import sys; import time; print(\"X\" * 125001 + \"\\n\"); print(\"Y\" * 125001 + \"\\n\"); print(\"Z\" * 125001 + \"\\n\");"). | ||||||
| 		Timeout(5000 * time.Millisecond). | 		Timeout(5000 * time.Millisecond). | ||||||
| @@ -298,7 +298,7 @@ func TestFailOnTimeout(t *testing.T) { | |||||||
|  |  | ||||||
| func TestFailOnStderr(t *testing.T) { | func TestFailOnStderr(t *testing.T) { | ||||||
|  |  | ||||||
| 	res1, err := Runner("python").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").FailOnStderr().Run() | 	res1, err := Runner("python3").Arg("-c").Arg("import sys; print(\"error\", file=sys.stderr, end='')").FailOnStderr().Run() | ||||||
| 	if err == nil { | 	if err == nil { | ||||||
| 		t.Errorf("no err") | 		t.Errorf("no err") | ||||||
| 	} | 	} | ||||||
|   | |||||||
| @@ -32,8 +32,8 @@ func (pr *pipeReader) Read(listener []CommandListener) (string, string, string, | |||||||
| 	stdout := "" | 	stdout := "" | ||||||
| 	go func() { | 	go func() { | ||||||
| 		buf := make([]byte, 128) | 		buf := make([]byte, 128) | ||||||
| 		for true { | 		for { | ||||||
| 			n, out := pr.stdout.Read(buf) | 			n, err := pr.stdout.Read(buf) | ||||||
| 			if n > 0 { | 			if n > 0 { | ||||||
| 				txt := string(buf[:n]) | 				txt := string(buf[:n]) | ||||||
| 				stdout += txt | 				stdout += txt | ||||||
| @@ -42,11 +42,11 @@ func (pr *pipeReader) Read(listener []CommandListener) (string, string, string, | |||||||
| 					lstr.ReadRawStdout(buf[:n]) | 					lstr.ReadRawStdout(buf[:n]) | ||||||
| 				} | 				} | ||||||
| 			} | 			} | ||||||
| 			if out == io.EOF { | 			if err == io.EOF { | ||||||
| 				break | 				break | ||||||
| 			} | 			} | ||||||
| 			if out != nil { | 			if err != nil { | ||||||
| 				errch <- out | 				errch <- err | ||||||
| 				break | 				break | ||||||
| 			} | 			} | ||||||
| 		} | 		} | ||||||
| @@ -61,7 +61,7 @@ func (pr *pipeReader) Read(listener []CommandListener) (string, string, string, | |||||||
| 	stderr := "" | 	stderr := "" | ||||||
| 	go func() { | 	go func() { | ||||||
| 		buf := make([]byte, 128) | 		buf := make([]byte, 128) | ||||||
| 		for true { | 		for { | ||||||
| 			n, err := pr.stderr.Read(buf) | 			n, err := pr.stderr.Read(buf) | ||||||
|  |  | ||||||
| 			if n > 0 { | 			if n > 0 { | ||||||
|   | |||||||
| @@ -41,13 +41,13 @@ func processEnvOverrides(rval reflect.Value, delim string, prefix string) error | |||||||
| 			continue | 			continue | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		if rvfield.Kind() == reflect.Struct { |  | ||||||
|  |  | ||||||
| 		envkey, found := rsfield.Tag.Lookup("env") | 		envkey, found := rsfield.Tag.Lookup("env") | ||||||
| 		if !found || envkey == "-" { | 		if !found || envkey == "-" { | ||||||
| 			continue | 			continue | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|  | 		if rvfield.Kind() == reflect.Struct && rvfield.Type() != reflect.TypeOf(time.UnixMilli(0)) { | ||||||
|  |  | ||||||
| 			subPrefix := prefix | 			subPrefix := prefix | ||||||
| 			if envkey != "" { | 			if envkey != "" { | ||||||
| 				subPrefix = subPrefix + envkey + delim | 				subPrefix = subPrefix + envkey + delim | ||||||
| @@ -57,10 +57,7 @@ func processEnvOverrides(rval reflect.Value, delim string, prefix string) error | |||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 				return err | 				return err | ||||||
| 			} | 			} | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		envkey := rsfield.Tag.Get("env") |  | ||||||
| 		if envkey == "" || envkey == "-" { |  | ||||||
| 			continue | 			continue | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|   | |||||||
| @@ -66,7 +66,6 @@ func (ph PassHash) Data() (_version int, _seed []byte, _payload []byte, _totp bo | |||||||
| 		return int(version), nil, payload, false, nil, true | 		return int(version), nil, payload, false, nil, true | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	// |  | ||||||
| 	if version == 2 { | 	if version == 2 { | ||||||
| 		if len(split) != 3 { | 		if len(split) != 3 { | ||||||
| 			return -1, nil, nil, false, nil, false | 			return -1, nil, nil, false, nil, false | ||||||
|   | |||||||
							
								
								
									
										263
									
								
								cryptext/pronouncablePassword.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										263
									
								
								cryptext/pronouncablePassword.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,263 @@ | |||||||
|  | package cryptext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"crypto/rand" | ||||||
|  | 	"io" | ||||||
|  | 	"math/big" | ||||||
|  | 	mathrand "math/rand" | ||||||
|  | 	"strings" | ||||||
|  | ) | ||||||
|  |  | ||||||
const (
	ppStartChar            = "BCDFGHJKLMNPQRSTVWXZ" // characters allowed as the first char of a segment (consonants only)
	ppEndChar              = "ABDEFIKMNORSTUXYZ"    // characters allowed as the last char of a segment / the password
	ppVowel                = "AEIOUY"               // vowel set (Y counts as a vowel here)
	ppConsonant            = "BCDFGHJKLMNPQRSTVWXZ" // consonant set
	ppSegmentLenMin        = 3                      // minimum segment length (inclusive)
	ppSegmentLenMax        = 7                      // maximum segment length (exclusive bound for the random target)
	ppMaxRepeatedVowel     = 2                      // max consecutive vowels
	ppMaxRepeatedConsonant = 2                      // max consecutive consonants
)
|  |  | ||||||
// ppContinuation maps each character to the set of characters that may
// directly follow it (hand-tuned for pronounceability).
var ppContinuation = map[uint8]string{
	'A': "BCDFGHJKLMNPRSTVWXYZ",
	'B': "ADFIKLMNORSTUY",
	'C': "AEIKOUY",
	'D': "AEILORSUYZ",
	'E': "BCDFGHJKLMNPRSTVWXYZ",
	'F': "ADEGIKLOPRTUY",
	'G': "ABDEFHILMNORSTUY",
	'H': "AEIOUY",
	'I': "BCDFGHJKLMNPRSTVWXZ",
	'J': "AEIOUY",
	'K': "ADEFHILMNORSTUY",
	'L': "ADEFGIJKMNOPSTUVWYZ",
	'M': "ABEFIKOPSTUY",
	'N': "ABEFIKOPSTUY",
	'O': "BCDFGHJKLMNPRSTVWXYZ",
	'P': "AEFIJLORSTUY",
	'Q': "AEIOUY",
	'R': "ADEFGHIJKLMNOPSTUVYZ",
	'S': "ACDEIKLOPTUYZ",
	'T': "AEHIJOPRSUWY",
	'U': "BCDFGHJKLMNPRSTVWXZ",
	'V': "AEIOUY",
	'W': "AEIOUY",
	'X': "AEIOUY",
	'Y': "ABCDFGHKLMNPRSTVXZ",
	'Z': "AEILOTUY",
}

// ppLog2Map is a precomputed log2(n) table for n in [1,32], used to sum up
// the entropy gained by each random character choice.
var ppLog2Map = map[int]float64{
	1:  0.00000000,
	2:  1.00000000,
	3:  1.58496250,
	4:  2.00000000,
	5:  2.32192809,
	6:  2.58496250,
	7:  2.80735492,
	8:  3.00000000,
	9:  3.16992500,
	10: 3.32192809,
	11: 3.45943162,
	12: 3.58496250,
	13: 3.70043972,
	14: 3.80735492,
	15: 3.90689060,
	16: 4.00000000,
	17: 4.08746284,
	18: 4.16992500,
	19: 4.24792751,
	20: 4.32192809,
	21: 4.39231742,
	22: 4.45943162,
	23: 4.52356196,
	24: 4.58496250,
	25: 4.64385619,
	26: 4.70043972,
	27: 4.75488750,
	28: 4.80735492,
	29: 4.85798100,
	30: 4.90689060,
	31: 4.95419631,
	32: 5.00000000,
}

// Membership-set variants of the charsets above, for O(1) lookups.
var (
	ppVowelMap     = ppMakeSet(ppVowel)
	ppConsonantMap = ppMakeSet(ppConsonant)
	ppEndCharMap   = ppMakeSet(ppEndChar)
)
|  |  | ||||||
// ppMakeSet converts a charset string into a lookup set of its characters.
func ppMakeSet(v string) map[uint8]bool {
	set := make(map[uint8]bool, len(v))
	for _, r := range v {
		set[uint8(r)] = true
	}
	return set
}
|  |  | ||||||
// ppRandInt returns a uniformly distributed random int in [0, max), reading
// randomness from rng. It panics if rng fails or max <= 0 (crypto/rand.Int
// is documented to panic for max <= 0).
func ppRandInt(rng io.Reader, max int) int {
	res, err := rand.Int(rng, big.NewInt(int64(max)))
	if err != nil {
		panic(err)
	}
	return int(res.Int64())
}
|  |  | ||||||
|  | func ppRand(rng io.Reader, chars string, entropy *float64) uint8 { | ||||||
|  | 	chr := chars[ppRandInt(rng, len(chars))] | ||||||
|  |  | ||||||
|  | 	*entropy = *entropy + ppLog2Map[len(chars)] | ||||||
|  |  | ||||||
|  | 	return chr | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ppCharType(chr uint8) (bool, bool) { | ||||||
|  | 	_, ok1 := ppVowelMap[chr] | ||||||
|  | 	_, ok2 := ppConsonantMap[chr] | ||||||
|  |  | ||||||
|  | 	return ok1, ok2 | ||||||
|  | } | ||||||
|  |  | ||||||
// ppCharsetRemove returns cs with every character contained in set removed.
// If removal would leave nothing and allowEmpty is false, the original cs is
// returned unchanged (so callers always keep a usable charset).
func ppCharsetRemove(cs string, set map[uint8]bool, allowEmpty bool) string {
	// build with strings.Builder instead of += to avoid quadratic reallocation
	var sb strings.Builder
	sb.Grow(len(cs))
	for _, chr := range cs {
		if !set[uint8(chr)] {
			sb.WriteRune(chr)
		}
	}
	if sb.Len() == 0 && !allowEmpty {
		return cs
	}
	return sb.String()
}
|  |  | ||||||
// ppCharsetFilter returns only the characters of cs that are contained in set.
// If filtering would leave nothing and allowEmpty is false, the original cs is
// returned unchanged (so callers always keep a usable charset).
func ppCharsetFilter(cs string, set map[uint8]bool, allowEmpty bool) string {
	// build with strings.Builder instead of += to avoid quadratic reallocation
	var sb strings.Builder
	sb.Grow(len(cs))
	for _, chr := range cs {
		if set[uint8(chr)] {
			sb.WriteRune(chr)
		}
	}
	if sb.Len() == 0 && !allowEmpty {
		return cs
	}
	return sb.String()
}
|  |  | ||||||
// PronouncablePasswordExt generates a pronounceable password of exactly pwlen
// characters, reading randomness from rng, and returns the password together
// with an estimate of its entropy in bits (sum of log2 of each choice's
// charset size). The password is built from segments of random target length
// in [ppSegmentLenMin, ppSegmentLenMax); each segment starts with an
// uppercase letter, continues lowercase, and character transitions follow the
// ppContinuation table with caps on consecutive vowels/consonants.
// Returns ("", 0) for pwlen <= 0.
func PronouncablePasswordExt(rng io.Reader, pwlen int) (string, float64) {

	// kinda pseudo markov-chain - with a few extra rules and no weights...

	if pwlen <= 0 {
		return "", 0
	}

	vowelCount := 0 // current run of consecutive vowels
	consoCount := 0 // current run of consecutive consonants
	entropy := float64(0)

	startChar := ppRand(rng, ppStartChar, &entropy)

	result := string(startChar)
	currentChar := startChar

	isVowel, isConsonant := ppCharType(currentChar)
	if isVowel {
		vowelCount = 1
	}
	if isConsonant {
		// start chars are all consonants; mark the consonant run as
		// saturated so the second character is forced to be a vowel
		consoCount = ppMaxRepeatedConsonant
	}

	segmentLen := 1

	// random target length for the current segment
	segmentLenTarget := ppSegmentLenMin + ppRandInt(rng, ppSegmentLenMax-ppSegmentLenMin)

	// each iteration appends exactly one character, so the loop terminates
	// with len(result) == pwlen
	for len(result) < pwlen {

		// allowed successors of the current character, minus characters
		// that would exceed the vowel/consonant run limits
		charset := ppContinuation[currentChar]
		if vowelCount >= ppMaxRepeatedVowel {
			charset = ppCharsetRemove(charset, ppVowelMap, false)
		}
		if consoCount >= ppMaxRepeatedConsonant {
			charset = ppCharsetRemove(charset, ppConsonantMap, false)
		}

		lastOfSegment := false
		newSegment := false

		if len(result)+1 == pwlen {
			// last of result
			charset = ppCharsetFilter(charset, ppEndCharMap, false)
		} else if segmentLen+1 == segmentLenTarget {
			// last of segment
			charsetNew := ppCharsetFilter(charset, ppEndCharMap, true)
			if charsetNew != "" {
				charset = charsetNew
				lastOfSegment = true
			}
		} else if segmentLen >= segmentLenTarget {
			// (perhaps) start of new segment
			if _, ok := ppEndCharMap[currentChar]; ok {
				charset = ppStartChar
				newSegment = true
			} else {
				// continue segment for one more char to (hopefully) find an end-char
				charsetNew := ppCharsetFilter(charset, ppEndCharMap, true)
				if charsetNew != "" {
					charset = charsetNew
					lastOfSegment = true
				}
			}
		} else {
			// normal continuation
		}

		newChar := ppRand(rng, charset, &entropy)
		if lastOfSegment {
			currentChar = newChar
			segmentLen++
			result += strings.ToLower(string(newChar))
		} else if newSegment {
			// first char of a new segment is uppercased and a fresh
			// random target length is drawn
			currentChar = newChar
			segmentLen = 1
			result += strings.ToUpper(string(newChar))
			segmentLenTarget = ppSegmentLenMin + ppRandInt(rng, ppSegmentLenMax-ppSegmentLenMin)
			vowelCount = 0
			consoCount = 0
		} else {
			currentChar = newChar
			segmentLen++
			result += strings.ToLower(string(newChar))
		}

		// update the vowel/consonant run counters for the appended char
		isVowel, isConsonant := ppCharType(currentChar)
		if isVowel {
			vowelCount++
			consoCount = 0
		}
		if isConsonant {
			vowelCount = 0
			if newSegment {
				// segment starters count as a saturated consonant run
				consoCount = ppMaxRepeatedConsonant
			} else {
				consoCount++
			}
		}
	}

	return result, entropy
}
|  |  | ||||||
|  | func PronouncablePassword(len int) string { | ||||||
|  | 	v, _ := PronouncablePasswordExt(rand.Reader, len) | ||||||
|  | 	return v | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func PronouncablePasswordSeeded(seed int64, len int) string { | ||||||
|  |  | ||||||
|  | 	v, _ := PronouncablePasswordExt(mathrand.New(mathrand.NewSource(seed)), len) | ||||||
|  | 	return v | ||||||
|  | } | ||||||
							
								
								
									
										35
									
								
								cryptext/pronouncablePassword_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										35
									
								
								cryptext/pronouncablePassword_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,35 @@ | |||||||
|  | package cryptext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"fmt" | ||||||
|  | 	"math/rand" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func TestPronouncablePasswordExt(t *testing.T) { | ||||||
|  | 	for i := 0; i < 20; i++ { | ||||||
|  | 		pw, entropy := PronouncablePasswordExt(rand.New(rand.NewSource(int64(i))), 16) | ||||||
|  | 		fmt.Printf("[%.2f] => %s\n", entropy, pw) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestPronouncablePasswordSeeded(t *testing.T) { | ||||||
|  | 	for i := 0; i < 20; i++ { | ||||||
|  | 		pw := PronouncablePasswordSeeded(int64(i), 8) | ||||||
|  | 		fmt.Printf("%s\n", pw) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestPronouncablePassword(t *testing.T) { | ||||||
|  | 	for i := 0; i < 20; i++ { | ||||||
|  | 		pw := PronouncablePassword(i + 1) | ||||||
|  | 		fmt.Printf("%s\n", pw) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestPronouncablePasswordWrongLen(t *testing.T) { | ||||||
|  | 	PronouncablePassword(0) | ||||||
|  | 	PronouncablePassword(-1) | ||||||
|  | 	PronouncablePassword(-2) | ||||||
|  | 	PronouncablePassword(-3) | ||||||
|  | } | ||||||
| @@ -4,6 +4,10 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| ) | ) | ||||||
|  |  | ||||||
// RawFilter is a mongo filter that only supplies an aggregation pipeline,
// without the pagination/sort information of the full Filter interface.
type RawFilter interface {
	FilterQuery() mongo.Pipeline
}
|  |  | ||||||
| type Filter interface { | type Filter interface { | ||||||
| 	FilterQuery() mongo.Pipeline | 	FilterQuery() mongo.Pipeline | ||||||
| 	Pagination() (string, SortDirection, string, SortDirection) | 	Pagination() (string, SortDirection, string, SortDirection) | ||||||
|   | |||||||
							
								
								
									
										254
									
								
								dataext/casMutex.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										254
									
								
								dataext/casMutex.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,254 @@ | |||||||
|  | package dataext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"golang.org/x/sync/semaphore" | ||||||
|  | 	"runtime" | ||||||
|  | 	"sync" | ||||||
|  | 	"sync/atomic" | ||||||
|  | 	"time" | ||||||
|  | 	"unsafe" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | // from https://github.com/viney-shih/go-lock/blob/2f19fd8ce335e33e0ab9dccb1ff2ce820c3da332/cas.go | ||||||
|  |  | ||||||
// CASMutex is the struct implementing RWMutex with CAS mechanism.
// The lock state lives in a single int32 (see casState), manipulated only via
// atomic compare-and-swap; blocked callers wait on a broadcast channel that is
// closed whenever the state changes.
type CASMutex struct {
	state     casState            // current lock state, accessed only via atomic ops
	turnstile *semaphore.Weighted // serializes lock attempts (fairness turnstile)

	broadcastChan chan struct{} // closed (and replaced) to wake all waiters
	broadcastMut  sync.RWMutex  // guards replacement of broadcastChan
}

// NewCASMutex creates a new, unlocked CASMutex.
func NewCASMutex() *CASMutex {
	return &CASMutex{
		state:         casStateNoLock,
		turnstile:     semaphore.NewWeighted(1),
		broadcastChan: make(chan struct{}),
	}
}

// casState encodes the lock state: -1 = write-locked, 0 = unlocked,
// any value >= 1 is the number of active readers.
type casState int32

const (
	casStateUndefined casState = iota - 2 // -2
	casStateWriteLock                     // -1
	casStateNoLock                        // 0
	casStateReadLock                      // >= 1
)

// getState normalizes a raw state value: any reader count >= 1 collapses to
// casStateReadLock; anything below casStateWriteLock is casStateUndefined.
func (m *CASMutex) getState(n int32) casState {
	switch st := casState(n); {
	case st == casStateWriteLock:
		fallthrough
	case st == casStateNoLock:
		return st
	case st >= casStateReadLock:
		return casStateReadLock
	default:
		// this should never happen
		return casStateUndefined
	}
}

// listen returns the broadcast channel current at call time; it will be
// closed by broadcast() on the next state change.
func (m *CASMutex) listen() <-chan struct{} {
	m.broadcastMut.RLock()
	defer m.broadcastMut.RUnlock()

	return m.broadcastChan
}

// broadcast wakes all current waiters by closing the active channel and
// installing a fresh channel for future waiters.
func (m *CASMutex) broadcast() {
	newCh := make(chan struct{})

	m.broadcastMut.Lock()
	ch := m.broadcastChan
	m.broadcastChan = newCh
	m.broadcastMut.Unlock()

	close(ch)
}
|  |  | ||||||
// tryLock attempts to CAS the state from unlocked to write-locked.
// With ctx == nil it is a single non-blocking attempt; otherwise it retries
// each time the broadcast channel fires, until success or ctx is done.
// NOTE: the broker channel is fetched BEFORE the CAS attempt so a state
// change between the failed CAS and the select cannot be missed.
func (m *CASMutex) tryLock(ctx context.Context) bool {
	for {
		broker := m.listen()
		if atomic.CompareAndSwapInt32(
			(*int32)(unsafe.Pointer(&m.state)),
			int32(casStateNoLock),
			int32(casStateWriteLock),
		) {
			return true
		}

		if ctx == nil {
			return false
		}

		select {
		case <-ctx.Done():
			// timeout or cancellation
			return false
		case <-broker:
			// waiting for signal triggered by m.broadcast() and trying again.
		}
	}
}

// TryLockWithContext attempts to acquire the lock, blocking until resources
// are available or ctx is done (timeout or cancellation).
func (m *CASMutex) TryLockWithContext(ctx context.Context) bool {
	if err := m.turnstile.Acquire(ctx, 1); err != nil {
		// Acquire failed due to timeout or cancellation
		return false
	}

	// writers hold the turnstile for the whole wait (released on return)
	defer m.turnstile.Release(1)

	return m.tryLock(ctx)
}

// Lock acquires the lock.
// If it is currently held by others, Lock will wait until it has a chance to acquire it.
func (m *CASMutex) Lock() {
	ctx := context.Background()

	// background context never expires, so this only returns after acquisition
	m.TryLockWithContext(ctx)
}

// TryLock attempts to acquire the lock without blocking.
// Return false if someone is holding it now.
func (m *CASMutex) TryLock() bool {
	if !m.turnstile.TryAcquire(1) {
		return false
	}

	defer m.turnstile.Release(1)

	// nil ctx = single non-blocking attempt
	return m.tryLock(nil)
}

// TryLockWithTimeout attempts to acquire the lock within a period of time.
// Return false if spending time is more than duration and no chance to acquire it.
func (m *CASMutex) TryLockWithTimeout(duration time.Duration) bool {
	ctx, cancel := context.WithTimeout(context.Background(), duration)
	defer cancel()

	return m.TryLockWithContext(ctx)
}

// Unlock releases the write lock.
// Panics if the mutex is not currently write-locked (matches sync.Mutex behavior).
func (m *CASMutex) Unlock() {
	if ok := atomic.CompareAndSwapInt32(
		(*int32)(unsafe.Pointer(&m.state)),
		int32(casStateWriteLock),
		int32(casStateNoLock),
	); !ok {
		panic("Unlock failed")
	}

	m.broadcast()
}
|  |  | ||||||
// rTryLock attempts to increment the reader count (allowed while unlocked or
// read-locked). With ctx == nil it is a single non-blocking attempt; otherwise
// it retries until success or ctx is done. A CAS failure while the state is
// still readable is treated as pure contention and retried immediately
// (after Gosched) rather than waiting for a broadcast.
func (m *CASMutex) rTryLock(ctx context.Context) bool {
	for {
		broker := m.listen()
		n := atomic.LoadInt32((*int32)(unsafe.Pointer(&m.state)))
		st := m.getState(n)
		switch st {
		case casStateNoLock, casStateReadLock:
			if atomic.CompareAndSwapInt32((*int32)(unsafe.Pointer(&m.state)), n, n+1) {
				return true
			}
		}

		if ctx == nil {
			return false
		}

		select {
		case <-ctx.Done():
			// timeout or cancellation
			return false
		default:
			switch st {
			// read-lock failed due to concurrence issue, try again immediately
			case casStateNoLock, casStateReadLock:
				runtime.Gosched() // allow other goroutines to do stuff.
				continue
			}
		}

		select {
		case <-ctx.Done():
			// timeout or cancellation
			return false
		case <-broker:
			// waiting for signal triggered by m.broadcast() and trying again.
		}
	}
}

// RTryLockWithContext attempts to acquire the read lock, blocking until resources
// are available or ctx is done (timeout or cancellation).
func (m *CASMutex) RTryLockWithContext(ctx context.Context) bool {
	if err := m.turnstile.Acquire(ctx, 1); err != nil {
		// Acquire failed due to timeout or cancellation
		return false
	}

	// NOTE: unlike the write path, the turnstile is released immediately
	// (not deferred) - readers only pass through it so that a waiting
	// writer blocks new readers, then wait on the state independently.
	m.turnstile.Release(1)

	return m.rTryLock(ctx)
}

// RLock acquires the read lock.
// If it is currently held by others writing, RLock will wait until it has a chance to acquire it.
func (m *CASMutex) RLock() {
	ctx := context.Background()

	// background context never expires, so this only returns after acquisition
	m.RTryLockWithContext(ctx)
}

// RTryLock attempts to acquire the read lock without blocking.
// Return false if someone is writing it now.
func (m *CASMutex) RTryLock() bool {
	if !m.turnstile.TryAcquire(1) {
		return false
	}

	m.turnstile.Release(1)

	// nil ctx = single non-blocking attempt
	return m.rTryLock(nil)
}

// RTryLockWithTimeout attempts to acquire the read lock within a period of time.
// Return false if spending time is more than duration and no chance to acquire it.
func (m *CASMutex) RTryLockWithTimeout(duration time.Duration) bool {
	ctx, cancel := context.WithTimeout(context.Background(), duration)
	defer cancel()

	return m.RTryLockWithContext(ctx)
}

// RUnlock releases the read lock.
// Panics if the mutex is not currently read-locked; when the last reader
// leaves, waiters are woken via broadcast.
func (m *CASMutex) RUnlock() {
	n := atomic.AddInt32((*int32)(unsafe.Pointer(&m.state)), -1)
	switch m.getState(n) {
	case casStateUndefined, casStateWriteLock:
		panic("RUnlock failed")
	case casStateNoLock:
		m.broadcast()
	}
}

// RLocker returns a Locker interface that implements the Lock and Unlock methods
// by calling CASMutex.RLock and CASMutex.RUnlock.
func (m *CASMutex) RLocker() sync.Locker {
	return (*rlocker)(m)
}

// rlocker adapts a CASMutex to sync.Locker using its read-lock methods.
type rlocker CASMutex

func (r *rlocker) Lock()   { (*CASMutex)(r).RLock() }
func (r *rlocker) Unlock() { (*CASMutex)(r).RUnlock() }
							
								
								
									
										59
									
								
								dataext/optional.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										59
									
								
								dataext/optional.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,59 @@ | |||||||
|  | package dataext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | ) | ||||||
|  |  | ||||||
// JsonOpt wraps a value of type T together with the information whether it was
// present in the JSON input at all, distinguishing "field absent" from
// "field set to its zero value".
type JsonOpt[T any] struct {
	isSet bool
	value T
}

// MarshalJSON encodes the wrapped value, or "null" when no value was set.
func (o JsonOpt[T]) MarshalJSON() ([]byte, error) {
	if o.isSet {
		return json.Marshal(o.value)
	}
	// conceptually this would be undefined - but undefined is not valid JSON
	return []byte("null"), nil
}

// UnmarshalJSON decodes data into the wrapped value and marks it as set.
func (o *JsonOpt[T]) UnmarshalJSON(data []byte) error {
	if o == nil {
		return errors.New("JsonOpt: UnmarshalJSON on nil pointer")
	}

	o.isSet = true
	return json.Unmarshal(data, &o.value)
}

// IsSet reports whether a value was set (i.e. was present in the JSON input).
func (o JsonOpt[T]) IsSet() bool {
	return o.isSet
}

// IsUnset reports whether no value was set.
func (o JsonOpt[T]) IsUnset() bool {
	return !o.isSet
}

// Value returns the wrapped value and whether it was set.
func (o JsonOpt[T]) Value() (T, bool) {
	if o.isSet {
		return o.value, true
	}
	var zero T
	return zero, false
}

// ValueOrNil returns a pointer to the wrapped value, or nil when unset.
func (o JsonOpt[T]) ValueOrNil() *T {
	if o.isSet {
		return &o.value
	}
	return nil
}

// MustValue returns the wrapped value; panics when no value was set.
func (o JsonOpt[T]) MustValue() T {
	if !o.isSet {
		panic("value not set")
	}
	return o.value
}
							
								
								
									
										163
									
								
								dataext/syncMap.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										163
									
								
								dataext/syncMap.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,163 @@ | |||||||
|  | package dataext | ||||||
|  |  | ||||||
|  | import "sync" | ||||||
|  |  | ||||||
// SyncMap is a generic map protected by a mutex.
// The zero value is ready to use; the backing map is allocated lazily.
type SyncMap[TKey comparable, TData any] struct {
	data map[TKey]TData
	lock sync.Mutex
}

// ensureInit allocates the backing map if necessary.
// Must only be called while s.lock is held.
// (Read-only methods skip this: reading/deleting on a nil map is safe in Go.)
func (s *SyncMap[TKey, TData]) ensureInit() {
	if s.data == nil {
		s.data = make(map[TKey]TData)
	}
}

// Set stores data under key, overwriting any existing entry.
func (s *SyncMap[TKey, TData]) Set(key TKey, data TData) {
	s.lock.Lock()
	defer s.lock.Unlock()

	s.ensureInit()

	s.data[key] = data
}

// SetIfNotContains stores data under key unless the key already exists.
// Returns true if the value was stored.
func (s *SyncMap[TKey, TData]) SetIfNotContains(key TKey, data TData) bool {
	s.lock.Lock()
	defer s.lock.Unlock()

	s.ensureInit()

	if _, existsInPreState := s.data[key]; existsInPreState {
		return false
	}

	s.data[key] = data

	return true
}

// SetIfNotContainsFunc is like SetIfNotContains, but the value is produced
// lazily - data() is only evaluated when the key is actually missing.
func (s *SyncMap[TKey, TData]) SetIfNotContainsFunc(key TKey, data func() TData) bool {
	s.lock.Lock()
	defer s.lock.Unlock()

	s.ensureInit()

	if _, existsInPreState := s.data[key]; existsInPreState {
		return false
	}

	s.data[key] = data()

	return true
}

// Get returns the value stored under key and whether it exists.
func (s *SyncMap[TKey, TData]) Get(key TKey) (TData, bool) {
	s.lock.Lock()
	defer s.lock.Unlock()

	if v, ok := s.data[key]; ok {
		return v, true
	}
	return *new(TData), false
}

// GetAndSetIfNotContains returns the existing value for key,
// or stores data under key and returns it when the key is missing.
func (s *SyncMap[TKey, TData]) GetAndSetIfNotContains(key TKey, data TData) TData {
	s.lock.Lock()
	defer s.lock.Unlock()

	s.ensureInit()

	if v, ok := s.data[key]; ok {
		return v
	}

	s.data[key] = data
	return data
}

// GetAndSetIfNotContainsFunc is like GetAndSetIfNotContains, but the value is
// produced lazily - data() is only evaluated when the key is actually missing.
func (s *SyncMap[TKey, TData]) GetAndSetIfNotContainsFunc(key TKey, data func() TData) TData {
	s.lock.Lock()
	defer s.lock.Unlock()

	s.ensureInit()

	if v, ok := s.data[key]; ok {
		return v
	}

	dataObj := data()
	s.data[key] = dataObj
	return dataObj
}

// Delete removes key from the map; returns true if the key existed.
func (s *SyncMap[TKey, TData]) Delete(key TKey) bool {
	s.lock.Lock()
	defer s.lock.Unlock()

	_, ok := s.data[key]

	delete(s.data, key)

	return ok
}

// Contains reports whether key currently exists in the map.
func (s *SyncMap[TKey, TData]) Contains(key TKey) bool {
	s.lock.Lock()
	defer s.lock.Unlock()

	_, ok := s.data[key]

	return ok
}

// GetAllKeys returns a snapshot of all keys (in undefined order).
func (s *SyncMap[TKey, TData]) GetAllKeys() []TKey {
	s.lock.Lock()
	defer s.lock.Unlock()

	r := make([]TKey, 0, len(s.data))

	for k := range s.data {
		r = append(r, k)
	}

	return r
}

// GetAllValues returns a snapshot of all values (in undefined order).
func (s *SyncMap[TKey, TData]) GetAllValues() []TData {
	s.lock.Lock()
	defer s.lock.Unlock()

	r := make([]TData, 0, len(s.data))

	for _, v := range s.data {
		r = append(r, v)
	}

	return r
}
							
								
								
									
										170
									
								
								dataext/tuple.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										170
									
								
								dataext/tuple.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,170 @@ | |||||||
|  | package dataext | ||||||
|  |  | ||||||
// ValueGroup is the common interface implemented by every fixed-size
// tuple type in this package (Single through Nonuple).
type ValueGroup interface {
	// TupleLength returns the number of elements in the tuple.
	TupleLength() int
	// TupleValues returns the tuple's elements, in order, as an []any slice.
	TupleValues() []any
}
|  |  | ||||||
|  | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
// Single is a 1-element value group holding V1.
type Single[T1 any] struct {
	V1 T1
}

// TupleLength returns the element count of a Single, which is always 1.
func (v Single[T1]) TupleLength() int {
	return 1
}

// TupleValues returns the single element wrapped in an []any slice.
func (v Single[T1]) TupleValues() []any {
	return []any{v.V1}
}
|  |  | ||||||
|  | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
// Tuple is a 2-element value group holding V1 and V2.
type Tuple[T1 any, T2 any] struct {
	V1 T1
	V2 T2
}

// TupleLength returns the element count of a Tuple, which is always 2.
func (v Tuple[T1, T2]) TupleLength() int {
	return 2
}

// TupleValues returns both elements, in order, as an []any slice.
func (v Tuple[T1, T2]) TupleValues() []any {
	return []any{v.V1, v.V2}
}
|  |  | ||||||
|  | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
// Triple is a 3-element value group holding V1 through V3.
type Triple[T1 any, T2 any, T3 any] struct {
	V1 T1
	V2 T2
	V3 T3
}

// TupleLength returns the element count of a Triple, which is always 3.
func (v Triple[T1, T2, T3]) TupleLength() int {
	return 3
}

// TupleValues returns the three elements, in order, as an []any slice.
func (v Triple[T1, T2, T3]) TupleValues() []any {
	return []any{v.V1, v.V2, v.V3}
}
|  |  | ||||||
|  | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
// Quadruple is a 4-element value group holding V1 through V4.
type Quadruple[T1 any, T2 any, T3 any, T4 any] struct {
	V1 T1
	V2 T2
	V3 T3
	V4 T4
}

// TupleLength returns the element count of a Quadruple, which is always 4.
func (v Quadruple[T1, T2, T3, T4]) TupleLength() int {
	return 4
}

// TupleValues returns the four elements, in order, as an []any slice.
func (v Quadruple[T1, T2, T3, T4]) TupleValues() []any {
	return []any{v.V1, v.V2, v.V3, v.V4}
}
|  |  | ||||||
|  | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
// Quintuple is a 5-element value group holding V1 through V5.
type Quintuple[T1 any, T2 any, T3 any, T4 any, T5 any] struct {
	V1 T1
	V2 T2
	V3 T3
	V4 T4
	V5 T5
}

// TupleLength returns the element count of a Quintuple, which is always 5.
func (v Quintuple[T1, T2, T3, T4, T5]) TupleLength() int {
	return 5
}

// TupleValues returns the five elements, in order, as an []any slice.
func (v Quintuple[T1, T2, T3, T4, T5]) TupleValues() []any {
	return []any{v.V1, v.V2, v.V3, v.V4, v.V5}
}
|  |  | ||||||
|  | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
// Sextuple is a 6-element value group holding V1 through V6.
type Sextuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any] struct {
	V1 T1
	V2 T2
	V3 T3
	V4 T4
	V5 T5
	V6 T6
}

// TupleLength returns the element count of a Sextuple, which is always 6.
func (v Sextuple[T1, T2, T3, T4, T5, T6]) TupleLength() int {
	return 6
}

// TupleValues returns the six elements, in order, as an []any slice.
func (v Sextuple[T1, T2, T3, T4, T5, T6]) TupleValues() []any {
	return []any{v.V1, v.V2, v.V3, v.V4, v.V5, v.V6}
}
|  |  | ||||||
|  | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
// Septuple is a 7-element value group holding V1 through V7.
type Septuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any] struct {
	V1 T1
	V2 T2
	V3 T3
	V4 T4
	V5 T5
	V6 T6
	V7 T7
}

// TupleLength returns the element count of a Septuple, which is always 7.
func (v Septuple[T1, T2, T3, T4, T5, T6, T7]) TupleLength() int {
	return 7
}

// TupleValues returns the seven elements, in order, as an []any slice.
func (v Septuple[T1, T2, T3, T4, T5, T6, T7]) TupleValues() []any {
	return []any{v.V1, v.V2, v.V3, v.V4, v.V5, v.V6, v.V7}
}
|  |  | ||||||
|  | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
// Octuple is an 8-element value group holding V1 through V8.
type Octuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any] struct {
	V1 T1
	V2 T2
	V3 T3
	V4 T4
	V5 T5
	V6 T6
	V7 T7
	V8 T8
}

// TupleLength returns the element count of an Octuple, which is always 8.
func (v Octuple[T1, T2, T3, T4, T5, T6, T7, T8]) TupleLength() int {
	return 8
}

// TupleValues returns the eight elements, in order, as an []any slice.
func (v Octuple[T1, T2, T3, T4, T5, T6, T7, T8]) TupleValues() []any {
	return []any{v.V1, v.V2, v.V3, v.V4, v.V5, v.V6, v.V7, v.V8}
}
|  |  | ||||||
|  | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
// Nonuple is a 9-element value group holding V1 through V9.
type Nonuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any] struct {
	V1 T1
	V2 T2
	V3 T3
	V4 T4
	V5 T5
	V6 T6
	V7 T7
	V8 T8
	V9 T9
}

// TupleLength returns the element count of a Nonuple, which is always 9.
func (v Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleLength() int {
	return 9
}

// TupleValues returns the nine elements, in order, as an []any slice.
func (v Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleValues() []any {
	return []any{v.V1, v.V2, v.V3, v.V4, v.V5, v.V6, v.V7, v.V8, v.V9}
}
| @@ -5,6 +5,8 @@ type Enum interface { | |||||||
| 	ValuesAny() []any | 	ValuesAny() []any | ||||||
| 	ValuesMeta() []EnumMetaValue | 	ValuesMeta() []EnumMetaValue | ||||||
| 	VarName() string | 	VarName() string | ||||||
|  | 	TypeName() string | ||||||
|  | 	PackageName() string | ||||||
| } | } | ||||||
|  |  | ||||||
| type StringEnum interface { | type StringEnum interface { | ||||||
| @@ -15,10 +17,17 @@ type StringEnum interface { | |||||||
| type DescriptionEnum interface { | type DescriptionEnum interface { | ||||||
| 	Enum | 	Enum | ||||||
| 	Description() string | 	Description() string | ||||||
|  | 	DescriptionMeta() EnumDescriptionMetaValue | ||||||
| } | } | ||||||
|  |  | ||||||
| type EnumMetaValue struct { | type EnumMetaValue struct { | ||||||
| 	VarName     string  `json:"varName"` | 	VarName     string  `json:"varName"` | ||||||
| 	Value       any     `json:"value"` | 	Value       Enum    `json:"value"` | ||||||
| 	Description *string `json:"description"` | 	Description *string `json:"description"` | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type EnumDescriptionMetaValue struct { | ||||||
|  | 	VarName     string `json:"varName"` | ||||||
|  | 	Value       Enum   `json:"value"` | ||||||
|  | 	Description string `json:"description"` | ||||||
|  | } | ||||||
|   | |||||||
							
								
								
									
										102
									
								
								exerr/builder.go
									
									
									
									
									
								
							
							
						
						
									
										102
									
								
								exerr/builder.go
									
									
									
									
									
								
							| @@ -68,8 +68,11 @@ func init() { | |||||||
| } | } | ||||||
|  |  | ||||||
| type Builder struct { | type Builder struct { | ||||||
|  | 	wrappedErr          error | ||||||
| 	errorData           *ExErr | 	errorData           *ExErr | ||||||
| 	containsGinData     bool | 	containsGinData     bool | ||||||
|  | 	containsContextData bool | ||||||
|  | 	noLog               bool | ||||||
| } | } | ||||||
|  |  | ||||||
| func Get(err error) *Builder { | func Get(err error) *Builder { | ||||||
| @@ -88,9 +91,9 @@ func Wrap(err error, msg string) *Builder { | |||||||
| 	if !pkgconfig.RecursiveErrors { | 	if !pkgconfig.RecursiveErrors { | ||||||
| 		v := FromError(err) | 		v := FromError(err) | ||||||
| 		v.Message = msg | 		v.Message = msg | ||||||
| 		return &Builder{errorData: v} | 		return &Builder{wrappedErr: err, errorData: v} | ||||||
| 	} | 	} | ||||||
| 	return &Builder{errorData: wrapExErr(FromError(err), msg, CatWrap, 1)} | 	return &Builder{wrappedErr: err, errorData: wrapExErr(FromError(err), msg, CatWrap, 1)} | ||||||
| } | } | ||||||
|  |  | ||||||
| // ---------------------------------------------------------------------------- | // ---------------------------------------------------------------------------- | ||||||
| @@ -190,6 +193,13 @@ func (b *Builder) System() *Builder { | |||||||
|  |  | ||||||
| // ---------------------------------------------------------------------------- | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
|  | func (b *Builder) NoLog() *Builder { | ||||||
|  | 	b.noLog = true | ||||||
|  | 	return b | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
| func (b *Builder) Id(key string, val fmt.Stringer) *Builder { | func (b *Builder) Id(key string, val fmt.Stringer) *Builder { | ||||||
| 	return b.addMeta(key, MDTID, newIDWrap(val)) | 	return b.addMeta(key, MDTID, newIDWrap(val)) | ||||||
| } | } | ||||||
| @@ -275,7 +285,7 @@ func (b *Builder) Any(key string, val any) *Builder { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (b *Builder) Stringer(key string, val fmt.Stringer) *Builder { | func (b *Builder) Stringer(key string, val fmt.Stringer) *Builder { | ||||||
| 	if val == nil { | 	if langext.IsNil(val) { | ||||||
| 		return b.addMeta(key, MDTString, "(!nil)") | 		return b.addMeta(key, MDTString, "(!nil)") | ||||||
| 	} else { | 	} else { | ||||||
| 		return b.addMeta(key, MDTString, val.String()) | 		return b.addMeta(key, MDTString, val.String()) | ||||||
| @@ -300,27 +310,27 @@ func (b *Builder) Errs(key string, val []error) *Builder { | |||||||
| func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) *Builder { | func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) *Builder { | ||||||
| 	if v := ctx.Value("start_timestamp"); v != nil { | 	if v := ctx.Value("start_timestamp"); v != nil { | ||||||
| 		if t, ok := v.(time.Time); ok { | 		if t, ok := v.(time.Time); ok { | ||||||
| 			b.Time("ctx.startTimestamp", t) | 			b.Time("ctx_startTimestamp", t) | ||||||
| 			b.Time("ctx.endTimestamp", time.Now()) | 			b.Time("ctx_endTimestamp", time.Now()) | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| 	b.Str("gin.method", req.Method) | 	b.Str("gin_method", req.Method) | ||||||
| 	b.Str("gin.path", g.FullPath()) | 	b.Str("gin_path", g.FullPath()) | ||||||
| 	b.Strs("gin.header", extractHeader(g.Request.Header)) | 	b.Strs("gin_header", extractHeader(g.Request.Header)) | ||||||
| 	if req.URL != nil { | 	if req.URL != nil { | ||||||
| 		b.Str("gin.url", req.URL.String()) | 		b.Str("gin_url", req.URL.String()) | ||||||
| 	} | 	} | ||||||
| 	if ctxVal := g.GetString("apiversion"); ctxVal != "" { | 	if ctxVal := g.GetString("apiversion"); ctxVal != "" { | ||||||
| 		b.Str("gin.context.apiversion", ctxVal) | 		b.Str("gin_context_apiversion", ctxVal) | ||||||
| 	} | 	} | ||||||
| 	if ctxVal := g.GetString("uid"); ctxVal != "" { | 	if ctxVal := g.GetString("uid"); ctxVal != "" { | ||||||
| 		b.Str("gin.context.uid", ctxVal) | 		b.Str("gin_context_uid", ctxVal) | ||||||
| 	} | 	} | ||||||
| 	if ctxVal := g.GetString("fcmId"); ctxVal != "" { | 	if ctxVal := g.GetString("fcmId"); ctxVal != "" { | ||||||
| 		b.Str("gin.context.fcmid", ctxVal) | 		b.Str("gin_context_fcmid", ctxVal) | ||||||
| 	} | 	} | ||||||
| 	if ctxVal := g.GetString("reqid"); ctxVal != "" { | 	if ctxVal := g.GetString("reqid"); ctxVal != "" { | ||||||
| 		b.Str("gin.context.reqid", ctxVal) | 		b.Str("gin_context_reqid", ctxVal) | ||||||
| 	} | 	} | ||||||
| 	if req.Method != "GET" && req.Body != nil { | 	if req.Method != "GET" && req.Body != nil { | ||||||
|  |  | ||||||
| @@ -331,12 +341,12 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) | |||||||
| 						var prettyJSON bytes.Buffer | 						var prettyJSON bytes.Buffer | ||||||
| 						err = json.Indent(&prettyJSON, bin, "", "  ") | 						err = json.Indent(&prettyJSON, bin, "", "  ") | ||||||
| 						if err == nil { | 						if err == nil { | ||||||
| 							b.Str("gin.body", string(prettyJSON.Bytes())) | 							b.Str("gin_body", string(prettyJSON.Bytes())) | ||||||
| 						} else { | 						} else { | ||||||
| 							b.Bytes("gin.body", bin) | 							b.Bytes("gin_body", bin) | ||||||
| 						} | 						} | ||||||
| 					} else { | 					} else { | ||||||
| 						b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type"))) | 						b.Str("gin_body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type"))) | ||||||
| 					} | 					} | ||||||
| 				} | 				} | ||||||
| 			} | 			} | ||||||
| @@ -346,9 +356,9 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) | |||||||
| 			if brc, ok := req.Body.(dataext.BufferedReadCloser); ok { | 			if brc, ok := req.Body.(dataext.BufferedReadCloser); ok { | ||||||
| 				if bin, err := brc.BufferedAll(); err == nil { | 				if bin, err := brc.BufferedAll(); err == nil { | ||||||
| 					if len(bin) < 16*1024 { | 					if len(bin) < 16*1024 { | ||||||
| 						b.Bytes("gin.body", bin) | 						b.Bytes("gin_body", bin) | ||||||
| 					} else { | 					} else { | ||||||
| 						b.Str("gin.body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type"))) | 						b.Str("gin_body", fmt.Sprintf("[[%v bytes | %s]]", len(bin), req.Header.Get("Content-Type"))) | ||||||
| 					} | 					} | ||||||
| 				} | 				} | ||||||
| 			} | 			} | ||||||
| @@ -356,10 +366,20 @@ func (b *Builder) GinReq(ctx context.Context, g *gin.Context, req *http.Request) | |||||||
|  |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	pkgconfig.ExtendGinMeta(ctx, b, g, req) | ||||||
|  |  | ||||||
| 	b.containsGinData = true | 	b.containsGinData = true | ||||||
| 	return b | 	return b | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (b *Builder) CtxData(method Method, ctx context.Context) *Builder { | ||||||
|  | 	pkgconfig.ExtendContextMeta(b, method, ctx) | ||||||
|  |  | ||||||
|  | 	b.containsContextData = true | ||||||
|  |  | ||||||
|  | 	return b | ||||||
|  | } | ||||||
|  |  | ||||||
| func formatHeader(header map[string][]string) string { | func formatHeader(header map[string][]string) string { | ||||||
| 	ml := 1 | 	ml := 1 | ||||||
| 	for k, _ := range header { | 	for k, _ := range header { | ||||||
| @@ -401,16 +421,26 @@ func extractHeader(header map[string][]string) []string { | |||||||
|  |  | ||||||
| // Build creates a new error, ready to pass up the stack | // Build creates a new error, ready to pass up the stack | ||||||
| // If the errors is not SevWarn or SevInfo it gets also logged (in short form, without stacktrace) onto stdout | // If the errors is not SevWarn or SevInfo it gets also logged (in short form, without stacktrace) onto stdout | ||||||
| func (b *Builder) Build() error { | // Can be gloablly configured with ZeroLogErrTraces and ZeroLogAllTraces | ||||||
|  | // Can be locally suppressed with Builder.NoLog() | ||||||
|  | func (b *Builder) Build(ctxs ...context.Context) error { | ||||||
| 	warnOnPkgConfigNotInitialized() | 	warnOnPkgConfigNotInitialized() | ||||||
|  |  | ||||||
| 	if pkgconfig.ZeroLogErrTraces && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) { | 	for _, dctx := range ctxs { | ||||||
|  | 		b.CtxData(MethodBuild, dctx) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if pkgconfig.DisableErrorWrapping && b.wrappedErr != nil { | ||||||
|  | 		return b.wrappedErr | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if pkgconfig.ZeroLogErrTraces && !b.noLog && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) { | ||||||
| 		b.errorData.ShortLog(stackSkipLogger.Error()) | 		b.errorData.ShortLog(stackSkipLogger.Error()) | ||||||
| 	} else if pkgconfig.ZeroLogAllTraces { | 	} else if pkgconfig.ZeroLogAllTraces && !b.noLog { | ||||||
| 		b.errorData.ShortLog(stackSkipLogger.Error()) | 		b.errorData.ShortLog(stackSkipLogger.Error()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	b.CallListener(MethodBuild) | 	b.errorData.CallListener(MethodBuild) | ||||||
|  |  | ||||||
| 	return b.errorData | 	return b.errorData | ||||||
| } | } | ||||||
| @@ -424,27 +454,35 @@ func (b *Builder) Output(ctx context.Context, g *gin.Context) { | |||||||
| 		b.GinReq(ctx, g, g.Request) | 		b.GinReq(ctx, g, g.Request) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	b.CtxData(MethodOutput, ctx) | ||||||
|  |  | ||||||
| 	b.errorData.Output(g) | 	b.errorData.Output(g) | ||||||
|  |  | ||||||
| 	if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal { | 	if (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) && (pkgconfig.ZeroLogErrGinOutput || pkgconfig.ZeroLogAllGinOutput) { | ||||||
| 		b.errorData.Log(stackSkipLogger.Error()) | 		b.errorData.Log(stackSkipLogger.Error()) | ||||||
| 	} else if b.errorData.Severity == SevWarn { | 	} else if (b.errorData.Severity == SevWarn) && (pkgconfig.ZeroLogAllGinOutput) { | ||||||
| 		b.errorData.Log(stackSkipLogger.Warn()) | 		b.errorData.Log(stackSkipLogger.Warn()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	b.CallListener(MethodOutput) | 	b.errorData.CallListener(MethodOutput) | ||||||
| } | } | ||||||
|  |  | ||||||
| // Print prints the error | // Print prints the error | ||||||
| // If the error is SevErr we also send it to the error-service | // If the error is SevErr we also send it to the error-service | ||||||
| func (b *Builder) Print() { | func (b *Builder) Print(ctxs ...context.Context) { | ||||||
|  | 	warnOnPkgConfigNotInitialized() | ||||||
|  |  | ||||||
|  | 	for _, dctx := range ctxs { | ||||||
|  | 		b.CtxData(MethodPrint, dctx) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal { | 	if b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal { | ||||||
| 		b.errorData.Log(stackSkipLogger.Error()) | 		b.errorData.Log(stackSkipLogger.Error()) | ||||||
| 	} else if b.errorData.Severity == SevWarn { | 	} else if b.errorData.Severity == SevWarn { | ||||||
| 		b.errorData.ShortLog(stackSkipLogger.Warn()) | 		b.errorData.ShortLog(stackSkipLogger.Warn()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	b.CallListener(MethodPrint) | 	b.errorData.CallListener(MethodPrint) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (b *Builder) Format(level LogPrintLevel) string { | func (b *Builder) Format(level LogPrintLevel) string { | ||||||
| @@ -453,11 +491,17 @@ func (b *Builder) Format(level LogPrintLevel) string { | |||||||
|  |  | ||||||
| // Fatal prints the error and terminates the program | // Fatal prints the error and terminates the program | ||||||
| // If the error is SevErr we also send it to the error-service | // If the error is SevErr we also send it to the error-service | ||||||
| func (b *Builder) Fatal() { | func (b *Builder) Fatal(ctxs ...context.Context) { | ||||||
|  |  | ||||||
| 	b.errorData.Severity = SevFatal | 	b.errorData.Severity = SevFatal | ||||||
|  |  | ||||||
|  | 	for _, dctx := range ctxs { | ||||||
|  | 		b.CtxData(MethodFatal, dctx) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	b.errorData.Log(stackSkipLogger.WithLevel(zerolog.FatalLevel)) | 	b.errorData.Log(stackSkipLogger.WithLevel(zerolog.FatalLevel)) | ||||||
|  |  | ||||||
| 	b.CallListener(MethodFatal) | 	b.errorData.CallListener(MethodFatal) | ||||||
|  |  | ||||||
| 	os.Exit(1) | 	os.Exit(1) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -56,7 +56,7 @@ func wrapExErr(e *ExErr, msg string, cat ErrorCategory, stacktraceskip int) *ExE | |||||||
| 		UniqueID:       newID(), | 		UniqueID:       newID(), | ||||||
| 		Category:       cat, | 		Category:       cat, | ||||||
| 		Type:           TypeWrap, | 		Type:           TypeWrap, | ||||||
| 		Severity:       SevErr, | 		Severity:       e.Severity, | ||||||
| 		Timestamp:      time.Now(), | 		Timestamp:      time.Now(), | ||||||
| 		StatusCode:     e.StatusCode, | 		StatusCode:     e.StatusCode, | ||||||
| 		Message:        msg, | 		Message:        msg, | ||||||
| @@ -181,7 +181,7 @@ func getReflectedMetaValues(value interface{}, remainingDepth int) map[string]Me | |||||||
|  |  | ||||||
| 	jsonval, err := json.Marshal(value) | 	jsonval, err := json.Marshal(value) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		panic(err) // gets recovered later up | 		return map[string]MetaValue{"": {DataType: MDTString, Value: fmt.Sprintf("Failed to Marshal %T:\n%+v", value, value)}} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return map[string]MetaValue{"": {DataType: MDTString, Value: string(jsonval)}} | 	return map[string]MetaValue{"": {DataType: MDTString, Value: string(jsonval)}} | ||||||
|   | |||||||
| @@ -1,72 +1,14 @@ | |||||||
| package exerr | package exerr | ||||||
|  |  | ||||||
| import ( | type Method string | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/dataext" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | const ( | ||||||
|  | 	MethodOutput Method = "OUTPUT" | ||||||
|  | 	MethodPrint  Method = "PRINT" | ||||||
|  | 	MethodBuild  Method = "BUILD" | ||||||
|  | 	MethodFatal  Method = "FATAL" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type ErrorCategory struct{ Category string } |  | ||||||
|  |  | ||||||
| var ( |  | ||||||
| 	CatWrap    = ErrorCategory{"Wrap"}    // The error is simply wrapping another error (e.g. when a grpc call returns an error) |  | ||||||
| 	CatSystem  = ErrorCategory{"System"}  // An internal system error (e.g. connection to db failed) |  | ||||||
| 	CatUser    = ErrorCategory{"User"}    // The user (the API caller) did something wrong (e.g. he has no permissions to do this) |  | ||||||
| 	CatForeign = ErrorCategory{"Foreign"} // A foreign error that some component threw (e.g. an unknown mongodb error), happens if we call Wrap(..) on an non-bmerror value |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| //goland:noinspection GoUnusedGlobalVariable |  | ||||||
| var AllCategories = []ErrorCategory{CatWrap, CatSystem, CatUser, CatForeign} |  | ||||||
|  |  | ||||||
| type ErrorSeverity struct{ Severity string } |  | ||||||
|  |  | ||||||
| var ( |  | ||||||
| 	SevTrace = ErrorSeverity{"Trace"} |  | ||||||
| 	SevDebug = ErrorSeverity{"Debug"} |  | ||||||
| 	SevInfo  = ErrorSeverity{"Info"} |  | ||||||
| 	SevWarn  = ErrorSeverity{"Warn"} |  | ||||||
| 	SevErr   = ErrorSeverity{"Err"} |  | ||||||
| 	SevFatal = ErrorSeverity{"Fatal"} |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| //goland:noinspection GoUnusedGlobalVariable |  | ||||||
| var AllSeverities = []ErrorSeverity{SevTrace, SevDebug, SevInfo, SevWarn, SevErr, SevFatal} |  | ||||||
|  |  | ||||||
| type ErrorType struct { |  | ||||||
| 	Key               string |  | ||||||
| 	DefaultStatusCode *int |  | ||||||
| } |  | ||||||
|  |  | ||||||
| //goland:noinspection GoUnusedGlobalVariable |  | ||||||
| var ( |  | ||||||
| 	TypeInternal       = NewType("INTERNAL_ERROR", langext.Ptr(500)) |  | ||||||
| 	TypePanic          = NewType("PANIC", langext.Ptr(500)) |  | ||||||
| 	TypeNotImplemented = NewType("NOT_IMPLEMENTED", langext.Ptr(500)) |  | ||||||
|  |  | ||||||
| 	TypeWrap = NewType("Wrap", nil) |  | ||||||
|  |  | ||||||
| 	TypeBindFailURI      = NewType("BINDFAIL_URI", langext.Ptr(400)) |  | ||||||
| 	TypeBindFailQuery    = NewType("BINDFAIL_QUERY", langext.Ptr(400)) |  | ||||||
| 	TypeBindFailJSON     = NewType("BINDFAIL_JSON", langext.Ptr(400)) |  | ||||||
| 	TypeBindFailFormData = NewType("BINDFAIL_FORMDATA", langext.Ptr(400)) |  | ||||||
| 	TypeBindFailHeader   = NewType("BINDFAIL_HEADER", langext.Ptr(400)) |  | ||||||
|  |  | ||||||
| 	TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401)) |  | ||||||
| 	TypeAuthFailed   = NewType("AUTH_FAILED", langext.Ptr(401)) |  | ||||||
|  |  | ||||||
| 	// other values come from pkgconfig |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| var registeredTypes = dataext.SyncSet[string]{} |  | ||||||
|  |  | ||||||
| func NewType(key string, defStatusCode *int) ErrorType { |  | ||||||
| 	insertOkay := registeredTypes.Add(key) |  | ||||||
| 	if !insertOkay { |  | ||||||
| 		panic("Cannot register same ErrType ('" + key + "') more than once") |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return ErrorType{key, defStatusCode} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| type LogPrintLevel string | type LogPrintLevel string | ||||||
|  |  | ||||||
| const ( | const ( | ||||||
|   | |||||||
							
								
								
									
										89
									
								
								exerr/dataCategory.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										89
									
								
								exerr/dataCategory.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,89 @@ | |||||||
|  | package exerr | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"reflect" | ||||||
|  | ) | ||||||
|  |  | ||||||
// ErrorCategory describes the broad origin of an error. It is a closed
// enumeration wrapped in a struct so arbitrary strings cannot be used
// as categories by accident.
type ErrorCategory struct{ Category string }

var (
	CatWrap    = ErrorCategory{"Wrap"}    // The error is simply wrapping another error (e.g. when a grpc call returns an error)
	CatSystem  = ErrorCategory{"System"}  // An internal system error (e.g. connection to db failed)
	CatUser    = ErrorCategory{"User"}    // The user (the API caller) did something wrong (e.g. he has no permissions to do this)
	CatForeign = ErrorCategory{"Foreign"} // A foreign error that some component threw (e.g. an unknown mongodb error), happens if we call Wrap(..) on an non-bmerror value
)

// UnmarshalJSON decodes a plain JSON string directly into the Category field.
func (e *ErrorCategory) UnmarshalJSON(bytes []byte) error {
	return json.Unmarshal(bytes, &e.Category)
}

// MarshalJSON encodes the category as a plain JSON string (not an object).
func (e ErrorCategory) MarshalJSON() ([]byte, error) {
	return json.Marshal(e.Category)
}
|  |  | ||||||
|  | func (e *ErrorCategory) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*e = ErrorCategory{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeString { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into String", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt string | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*e = ErrorCategory{tt} | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (e ErrorCategory) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	return bson.MarshalValue(e.Category) | ||||||
|  | } | ||||||
|  |  | ||||||
// DecodeValue implements bsoncodec.ValueDecoder so ErrorCategory works with a
// custom bson registry. It copies the raw bson value, delegates decoding to
// UnmarshalBSONValue, and writes the result into val, handling both pointer
// and value targets (including allocating a nil pointer target first).
func (e ErrorCategory) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
	if val.Kind() == reflect.Ptr && val.IsNil() {
		if !val.CanSet() {
			return errors.New("ValueUnmarshalerDecodeValue")
		}
		// allocate a fresh value so there is something settable to decode into
		val.Set(reflect.New(val.Type().Elem()))
	}

	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
	if err != nil {
		return err
	}

	if val.Kind() == reflect.Ptr && len(src) == 0 {
		// empty raw value: leave the pointer target as nil
		val.Set(reflect.Zero(val.Type()))
		return nil
	}

	err = e.UnmarshalBSONValue(tp, src)
	if err != nil {
		return err
	}

	if val.Kind() == reflect.Ptr {
		val.Set(reflect.ValueOf(&e))
	} else {
		val.Set(reflect.ValueOf(e))
	}

	return nil
}

//goland:noinspection GoUnusedGlobalVariable
var AllCategories = []ErrorCategory{CatWrap, CatSystem, CatUser, CatForeign}
							
								
								
									
										91
									
								
								exerr/dataSeverity.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										91
									
								
								exerr/dataSeverity.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,91 @@ | |||||||
|  | package exerr | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"reflect" | ||||||
|  | ) | ||||||
|  |  | ||||||
// ErrorSeverity describes how severe an error is. It is a closed
// enumeration wrapped in a struct so arbitrary strings cannot be used
// as severities by accident.
type ErrorSeverity struct{ Severity string }

var (
	SevTrace = ErrorSeverity{"Trace"}
	SevDebug = ErrorSeverity{"Debug"}
	SevInfo  = ErrorSeverity{"Info"}
	SevWarn  = ErrorSeverity{"Warn"}
	SevErr   = ErrorSeverity{"Err"}
	SevFatal = ErrorSeverity{"Fatal"}
)

// UnmarshalJSON decodes a plain JSON string directly into the Severity field.
func (e *ErrorSeverity) UnmarshalJSON(bytes []byte) error {
	return json.Unmarshal(bytes, &e.Severity)
}

// MarshalJSON encodes the severity as a plain JSON string (not an object).
func (e ErrorSeverity) MarshalJSON() ([]byte, error) {
	return json.Marshal(e.Severity)
}
|  |  | ||||||
|  | func (e *ErrorSeverity) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*e = ErrorSeverity{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeString { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into String", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt string | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*e = ErrorSeverity{tt} | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
// MarshalBSONValue encodes the severity as a plain BSON string.
func (e ErrorSeverity) MarshalBSONValue() (bsontype.Type, []byte, error) {
	return bson.MarshalValue(e.Severity)
}
|  |  | ||||||
// DecodeValue implements bsoncodec.ValueDecoder so ErrorSeverity can be
// registered in a custom bson registry; unlike UnmarshalBSONValue it can
// also populate (or nil out) *ErrorSeverity pointer destinations.
func (e ErrorSeverity) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
	// allocate a value for nil pointer destinations so we can write into them
	if val.Kind() == reflect.Ptr && val.IsNil() {
		if !val.CanSet() {
			return errors.New("ValueUnmarshalerDecodeValue")
		}
		val.Set(reflect.New(val.Type().Elem()))
	}

	// copy the raw bson value (type tag + payload bytes) out of the reader
	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
	if err != nil {
		return err
	}

	// empty payload into a pointer destination -> leave the pointer nil
	if val.Kind() == reflect.Ptr && len(src) == 0 {
		val.Set(reflect.Zero(val.Type()))
		return nil
	}

	// decode into the (local copy) receiver, then assign it to the destination
	err = e.UnmarshalBSONValue(tp, src)
	if err != nil {
		return err
	}

	if val.Kind() == reflect.Ptr {
		val.Set(reflect.ValueOf(&e))
	} else {
		val.Set(reflect.ValueOf(e))
	}

	return nil
}
|  |  | ||||||
// AllSeverities lists every built-in severity value.
//
//goland:noinspection GoUnusedGlobalVariable
var AllSeverities = []ErrorSeverity{SevTrace, SevDebug, SevInfo, SevWarn, SevErr, SevFatal}
							
								
								
									
										155
									
								
								exerr/dataType.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										155
									
								
								exerr/dataType.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,155 @@ | |||||||
|  | package exerr | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/dataext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"reflect" | ||||||
|  | ) | ||||||
|  |  | ||||||
// ErrorType categorizes an error by a unique string key and optionally
// carries a default HTTP status code to respond with for this kind of error.
type ErrorType struct {
	Key               string
	// DefaultStatusCode may be nil (e.g. for wrapper types with no own status).
	DefaultStatusCode *int
}
|  |  | ||||||
// Built-in error types used by goext itself; each NewType call also
// registers the type so unmarshalling can resolve keys back to instances.
//
//goland:noinspection GoUnusedGlobalVariable
var (
	TypeInternal       = NewType("INTERNAL_ERROR", langext.Ptr(500))
	TypePanic          = NewType("PANIC", langext.Ptr(500))
	TypeNotImplemented = NewType("NOT_IMPLEMENTED", langext.Ptr(500))

	TypeMongoQuery        = NewType("MONGO_QUERY", langext.Ptr(500))
	TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500))
	TypeMongoFilter       = NewType("MONGO_FILTER", langext.Ptr(500))
	TypeMongoReflection   = NewType("MONGO_REFLECTION", langext.Ptr(500))
	TypeMongoInvalidOpt   = NewType("MONGO_INVALIDOPT", langext.Ptr(500))

	TypeSQLQuery  = NewType("SQL_QUERY", langext.Ptr(500))
	TypeSQLBuild  = NewType("SQL_BUILD", langext.Ptr(500))
	TypeSQLDecode = NewType("SQL_DECODE", langext.Ptr(500))

	// TypeWrap has no default status code on purpose (it inherits from the wrapped error)
	TypeWrap = NewType("Wrap", nil)

	TypeBindFailURI      = NewType("BINDFAIL_URI", langext.Ptr(400))
	TypeBindFailQuery    = NewType("BINDFAIL_QUERY", langext.Ptr(400))
	TypeBindFailJSON     = NewType("BINDFAIL_JSON", langext.Ptr(400))
	TypeBindFailFormData = NewType("BINDFAIL_FORMDATA", langext.Ptr(400))
	TypeBindFailHeader   = NewType("BINDFAIL_HEADER", langext.Ptr(400))

	TypeMarshalEntityID = NewType("MARSHAL_ENTITY_ID", langext.Ptr(400))
	TypeInvalidCSID     = NewType("INVALID_CSID", langext.Ptr(400))

	TypeGoogleStatuscode = NewType("GOOGLE_STATUSCODE", langext.Ptr(400))
	TypeGoogleResponse   = NewType("GOOGLE_RESPONSE", langext.Ptr(400))

	TypeUnauthorized = NewType("UNAUTHORIZED", langext.Ptr(401))
	TypeAuthFailed   = NewType("AUTH_FAILED", langext.Ptr(401))

	TypeInvalidImage    = NewType("IMAGEEXT_INVALID_IMAGE", langext.Ptr(400))
	TypeInvalidMimeType = NewType("IMAGEEXT_INVALID_MIMETYPE", langext.Ptr(400))

	// other values come from the downstream application that uses goext
)
|  |  | ||||||
|  | func (e *ErrorType) UnmarshalJSON(bytes []byte) error { | ||||||
|  | 	var k string | ||||||
|  | 	err := json.Unmarshal(bytes, &k) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if d, ok := registeredTypes.Get(k); ok { | ||||||
|  | 		*e = d | ||||||
|  | 		return nil | ||||||
|  | 	} else { | ||||||
|  | 		*e = ErrorType{k, nil} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
// MarshalJSON encodes the error type as a bare JSON string (its Key);
// the DefaultStatusCode is intentionally not serialized.
func (e ErrorType) MarshalJSON() ([]byte, error) {
	return json.Marshal(e.Key)
}
|  |  | ||||||
|  | func (e *ErrorType) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*e = ErrorType{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeString { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into String", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt string | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if d, ok := registeredTypes.Get(tt); ok { | ||||||
|  | 		*e = d | ||||||
|  | 		return nil | ||||||
|  | 	} else { | ||||||
|  | 		*e = ErrorType{tt, nil} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
// MarshalBSONValue encodes the error type as a plain BSON string (its Key).
func (e ErrorType) MarshalBSONValue() (bsontype.Type, []byte, error) {
	return bson.MarshalValue(e.Key)
}
|  |  | ||||||
// DecodeValue implements bsoncodec.ValueDecoder so ErrorType can be
// registered in a custom bson registry; unlike UnmarshalBSONValue it can
// also populate (or nil out) *ErrorType pointer destinations.
func (e ErrorType) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
	// allocate a value for nil pointer destinations so we can write into them
	if val.Kind() == reflect.Ptr && val.IsNil() {
		if !val.CanSet() {
			return errors.New("ValueUnmarshalerDecodeValue")
		}
		val.Set(reflect.New(val.Type().Elem()))
	}

	// copy the raw bson value (type tag + payload bytes) out of the reader
	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
	if err != nil {
		return err
	}

	// empty payload into a pointer destination -> leave the pointer nil
	if val.Kind() == reflect.Ptr && len(src) == 0 {
		val.Set(reflect.Zero(val.Type()))
		return nil
	}

	// decode into the (local copy) receiver, then assign it to the destination
	err = e.UnmarshalBSONValue(tp, src)
	if err != nil {
		return err
	}

	if val.Kind() == reflect.Ptr {
		val.Set(reflect.ValueOf(&e))
	} else {
		val.Set(reflect.ValueOf(e))
	}

	return nil
}
|  |  | ||||||
// registeredTypes maps type keys to their ErrorType instances (filled by NewType).
var registeredTypes = dataext.SyncMap[string, ErrorType]{}
|  |  | ||||||
|  | func NewType(key string, defStatusCode *int) ErrorType { | ||||||
|  | 	et := ErrorType{key, defStatusCode} | ||||||
|  |  | ||||||
|  | 	registeredTypes.Set(key, et) | ||||||
|  |  | ||||||
|  | 	return et | ||||||
|  | } | ||||||
|  |  | ||||||
// ListRegisteredTypes returns all ErrorType values registered via NewType
// (both the goext built-ins and downstream-application types).
func ListRegisteredTypes() []ErrorType {
	return registeredTypes.GetAllValues()
}
							
								
								
									
										153
									
								
								exerr/data_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										153
									
								
								exerr/data_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,153 @@ | |||||||
|  | package exerr | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/primitive" | ||||||
|  | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"testing" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func TestJSONMarshalErrorCategory(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	c1 := CatSystem | ||||||
|  |  | ||||||
|  | 	jsonbin := tst.Must(json.Marshal(c1))(t) | ||||||
|  |  | ||||||
|  | 	var c2 ErrorCategory | ||||||
|  | 	tst.AssertNoErr(t, json.Unmarshal(jsonbin, &c2)) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, c1, c2) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, string(jsonbin), "\"System\"") | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestJSONMarshalErrorSeverity(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	c1 := SevErr | ||||||
|  |  | ||||||
|  | 	jsonbin := tst.Must(json.Marshal(c1))(t) | ||||||
|  |  | ||||||
|  | 	var c2 ErrorSeverity | ||||||
|  | 	tst.AssertNoErr(t, json.Unmarshal(jsonbin, &c2)) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, c1, c2) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, string(jsonbin), "\"Err\"") | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestJSONMarshalErrorType(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	c1 := TypeNotImplemented | ||||||
|  |  | ||||||
|  | 	jsonbin := tst.Must(json.Marshal(c1))(t) | ||||||
|  |  | ||||||
|  | 	var c2 ErrorType | ||||||
|  | 	tst.AssertNoErr(t, json.Unmarshal(jsonbin, &c2)) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, c1, c2) | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, string(jsonbin), "\"NOT_IMPLEMENTED\"") | ||||||
|  | } | ||||||
|  |  | ||||||
// TestBSONMarshalErrorCategory round-trips an ErrorCategory through a real
// (local) mongodb: inserts the value, asserts it was stored as a bson string
// (via the $type filter), and decodes it back. Skipped when no local mongo
// responds within the short timeout.
func TestBSONMarshalErrorCategory(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), 350*time.Millisecond)
	defer cancel()

	client, err := mongo.Connect(ctx)
	if err != nil {
		t.Skip("Skip test - no local mongo found")
		return
	}
	err = client.Ping(ctx, nil)
	if err != nil {
		t.Skip("Skip test - no local mongo found")
		return
	}

	primimd := primitive.NewObjectID()

	_, err = client.Database("_test").Collection("goext-cicd").InsertOne(ctx, bson.M{"_id": primimd, "val": CatSystem})
	tst.AssertNoErr(t, err)

	// the $type filter only matches if the value was marshalled as a bson string
	cursor := client.Database("_test").Collection("goext-cicd").FindOne(ctx, bson.M{"_id": primimd, "val": bson.M{"$type": "string"}})

	var c1 struct {
		ID  primitive.ObjectID `bson:"_id"`
		Val ErrorCategory      `bson:"val"`
	}

	err = cursor.Decode(&c1)
	tst.AssertNoErr(t, err)

	tst.AssertEqual(t, c1.Val, CatSystem)
}
|  |  | ||||||
// TestBSONMarshalErrorSeverity round-trips an ErrorSeverity through a real
// (local) mongodb: inserts the value, asserts it was stored as a bson string
// (via the $type filter), and decodes it back. Skipped when no local mongo
// responds within the short timeout.
func TestBSONMarshalErrorSeverity(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), 350*time.Millisecond)
	defer cancel()

	client, err := mongo.Connect(ctx)
	if err != nil {
		t.Skip("Skip test - no local mongo found")
		return
	}
	err = client.Ping(ctx, nil)
	if err != nil {
		t.Skip("Skip test - no local mongo found")
		return
	}

	primimd := primitive.NewObjectID()

	_, err = client.Database("_test").Collection("goext-cicd").InsertOne(ctx, bson.M{"_id": primimd, "val": SevErr})
	tst.AssertNoErr(t, err)

	// the $type filter only matches if the value was marshalled as a bson string
	cursor := client.Database("_test").Collection("goext-cicd").FindOne(ctx, bson.M{"_id": primimd, "val": bson.M{"$type": "string"}})

	var c1 struct {
		ID  primitive.ObjectID `bson:"_id"`
		Val ErrorSeverity      `bson:"val"`
	}

	err = cursor.Decode(&c1)
	tst.AssertNoErr(t, err)

	tst.AssertEqual(t, c1.Val, SevErr)
}
|  |  | ||||||
// TestBSONMarshalErrorType round-trips an ErrorType through a real (local)
// mongodb: inserts the value, asserts it was stored as a bson string (via
// the $type filter), and decodes it back. Skipped when no local mongo
// responds within the short timeout.
func TestBSONMarshalErrorType(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), 350*time.Millisecond)
	defer cancel()

	client, err := mongo.Connect(ctx)
	if err != nil {
		t.Skip("Skip test - no local mongo found")
		return
	}
	err = client.Ping(ctx, nil)
	if err != nil {
		t.Skip("Skip test - no local mongo found")
		return
	}

	primimd := primitive.NewObjectID()

	_, err = client.Database("_test").Collection("goext-cicd").InsertOne(ctx, bson.M{"_id": primimd, "val": TypeNotImplemented})
	tst.AssertNoErr(t, err)

	// the $type filter only matches if the value was marshalled as a bson string
	cursor := client.Database("_test").Collection("goext-cicd").FindOne(ctx, bson.M{"_id": primimd, "val": bson.M{"$type": "string"}})

	var c1 struct {
		ID  primitive.ObjectID `bson:"_id"`
		Val ErrorType          `bson:"val"`
	}

	err = cursor.Decode(&c1)
	tst.AssertNoErr(t, err)

	tst.AssertEqual(t, c1.Val, TypeNotImplemented)
}
| @@ -1,8 +1,11 @@ | |||||||
| package exerr | package exerr | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"context" | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"net/http" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type ErrorPackageConfig struct { | type ErrorPackageConfig struct { | ||||||
| @@ -13,16 +16,26 @@ type ErrorPackageConfig struct { | |||||||
| 	IncludeMetaInGinOutput bool                                                                     // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output() | 	IncludeMetaInGinOutput bool                                                                     // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output() | ||||||
| 	ExtendGinOutput        func(err *ExErr, json map[string]any)                                    // (Optionally) extend the gin output with more fields | 	ExtendGinOutput        func(err *ExErr, json map[string]any)                                    // (Optionally) extend the gin output with more fields | ||||||
| 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any)                         // (Optionally) extend the gin `__data` output with more fields | 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any)                         // (Optionally) extend the gin `__data` output with more fields | ||||||
|  | 	DisableErrorWrapping   bool                                                                     // Disables the exerr.Wrap()...Build() function - will always return the original error | ||||||
|  | 	ZeroLogErrGinOutput    bool                                                                     // autom print zerolog logs on ginext.Error() / .Output(gin)  (for SevErr and SevFatal) | ||||||
|  | 	ZeroLogAllGinOutput    bool                                                                     // autom print zerolog logs on ginext.Error() / .Output(gin)  (for all Severities) | ||||||
|  | 	ExtendGinMeta          func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) // (Optionally) extend the final error meta values with additional data from the gin context (a few are automatically added, here more can be included) | ||||||
|  | 	ExtendContextMeta      func(b *Builder, method Method, dctx context.Context)                    // (Optionally) extend the final error meta values with additional data from the context (a few are automatically added, here more can be included) | ||||||
| } | } | ||||||
|  |  | ||||||
| type ErrorPackageConfigInit struct { | type ErrorPackageConfigInit struct { | ||||||
| 	ZeroLogErrTraces       bool | 	ZeroLogErrTraces       *bool | ||||||
| 	ZeroLogAllTraces       bool | 	ZeroLogAllTraces       *bool | ||||||
| 	RecursiveErrors        bool | 	RecursiveErrors        *bool | ||||||
| 	ExtendedGinOutput      bool | 	ExtendedGinOutput      *bool | ||||||
| 	IncludeMetaInGinOutput bool | 	IncludeMetaInGinOutput *bool | ||||||
| 	ExtendGinOutput        func(err *ExErr, json map[string]any) | 	ExtendGinOutput        func(err *ExErr, json map[string]any) | ||||||
| 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) | 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) | ||||||
|  | 	DisableErrorWrapping   *bool | ||||||
|  | 	ZeroLogErrGinOutput    *bool | ||||||
|  | 	ZeroLogAllGinOutput    *bool | ||||||
|  | 	ExtendGinMeta          func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) | ||||||
|  | 	ExtendContextMeta      func(b *Builder, method Method, dctx context.Context) | ||||||
| } | } | ||||||
|  |  | ||||||
| var initialized = false | var initialized = false | ||||||
| @@ -35,6 +48,11 @@ var pkgconfig = ErrorPackageConfig{ | |||||||
| 	IncludeMetaInGinOutput: true, | 	IncludeMetaInGinOutput: true, | ||||||
| 	ExtendGinOutput:        func(err *ExErr, json map[string]any) {}, | 	ExtendGinOutput:        func(err *ExErr, json map[string]any) {}, | ||||||
| 	ExtendGinDataOutput:    func(err *ExErr, depth int, json map[string]any) {}, | 	ExtendGinDataOutput:    func(err *ExErr, depth int, json map[string]any) {}, | ||||||
|  | 	DisableErrorWrapping:   false, | ||||||
|  | 	ZeroLogErrGinOutput:    true, | ||||||
|  | 	ZeroLogAllGinOutput:    false, | ||||||
|  | 	ExtendGinMeta:          func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) {}, | ||||||
|  | 	ExtendContextMeta:      func(b *Builder, method Method, dctx context.Context) {}, | ||||||
| } | } | ||||||
|  |  | ||||||
| // Init initializes the exerr packages | // Init initializes the exerr packages | ||||||
| @@ -47,6 +65,8 @@ func Init(cfg ErrorPackageConfigInit) { | |||||||
|  |  | ||||||
| 	ego := func(err *ExErr, json map[string]any) {} | 	ego := func(err *ExErr, json map[string]any) {} | ||||||
| 	egdo := func(err *ExErr, depth int, json map[string]any) {} | 	egdo := func(err *ExErr, depth int, json map[string]any) {} | ||||||
|  | 	egm := func(ctx context.Context, b *Builder, g *gin.Context, req *http.Request) {} | ||||||
|  | 	egcm := func(b *Builder, method Method, dctx context.Context) {} | ||||||
|  |  | ||||||
| 	if cfg.ExtendGinOutput != nil { | 	if cfg.ExtendGinOutput != nil { | ||||||
| 		ego = cfg.ExtendGinOutput | 		ego = cfg.ExtendGinOutput | ||||||
| @@ -54,20 +74,35 @@ func Init(cfg ErrorPackageConfigInit) { | |||||||
| 	if cfg.ExtendGinDataOutput != nil { | 	if cfg.ExtendGinDataOutput != nil { | ||||||
| 		egdo = cfg.ExtendGinDataOutput | 		egdo = cfg.ExtendGinDataOutput | ||||||
| 	} | 	} | ||||||
|  | 	if cfg.ExtendGinMeta != nil { | ||||||
|  | 		egm = cfg.ExtendGinMeta | ||||||
|  | 	} | ||||||
|  | 	if cfg.ExtendContextMeta != nil { | ||||||
|  | 		egcm = cfg.ExtendContextMeta | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	pkgconfig = ErrorPackageConfig{ | 	pkgconfig = ErrorPackageConfig{ | ||||||
| 		ZeroLogErrTraces:       cfg.ZeroLogErrTraces, | 		ZeroLogErrTraces:       langext.Coalesce(cfg.ZeroLogErrTraces, pkgconfig.ZeroLogErrTraces), | ||||||
| 		ZeroLogAllTraces:       cfg.ZeroLogAllTraces, | 		ZeroLogAllTraces:       langext.Coalesce(cfg.ZeroLogAllTraces, pkgconfig.ZeroLogAllTraces), | ||||||
| 		RecursiveErrors:        cfg.RecursiveErrors, | 		RecursiveErrors:        langext.Coalesce(cfg.RecursiveErrors, pkgconfig.RecursiveErrors), | ||||||
| 		ExtendedGinOutput:      cfg.ExtendedGinOutput, | 		ExtendedGinOutput:      langext.Coalesce(cfg.ExtendedGinOutput, pkgconfig.ExtendedGinOutput), | ||||||
| 		IncludeMetaInGinOutput: cfg.IncludeMetaInGinOutput, | 		IncludeMetaInGinOutput: langext.Coalesce(cfg.IncludeMetaInGinOutput, pkgconfig.IncludeMetaInGinOutput), | ||||||
| 		ExtendGinOutput:        ego, | 		ExtendGinOutput:        ego, | ||||||
| 		ExtendGinDataOutput:    egdo, | 		ExtendGinDataOutput:    egdo, | ||||||
|  | 		DisableErrorWrapping:   langext.Coalesce(cfg.DisableErrorWrapping, pkgconfig.DisableErrorWrapping), | ||||||
|  | 		ZeroLogAllGinOutput:    langext.Coalesce(cfg.ZeroLogAllGinOutput, pkgconfig.ZeroLogAllGinOutput), | ||||||
|  | 		ZeroLogErrGinOutput:    langext.Coalesce(cfg.ZeroLogErrGinOutput, pkgconfig.ZeroLogErrGinOutput), | ||||||
|  | 		ExtendGinMeta:          egm, | ||||||
|  | 		ExtendContextMeta:      egcm, | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	initialized = true | 	initialized = true | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func Initialized() bool { | ||||||
|  | 	return initialized | ||||||
|  | } | ||||||
|  |  | ||||||
| func warnOnPkgConfigNotInitialized() { | func warnOnPkgConfigNotInitialized() { | ||||||
| 	if !initialized { | 	if !initialized { | ||||||
| 		fmt.Printf("\n") | 		fmt.Printf("\n") | ||||||
|   | |||||||
| @@ -30,7 +30,7 @@ type ExErr struct { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (ee *ExErr) Error() string { | func (ee *ExErr) Error() string { | ||||||
| 	return ee.Message | 	return ee.RecursiveMessage() | ||||||
| } | } | ||||||
|  |  | ||||||
| // Unwrap must be implemented so that some error.XXX methods work | // Unwrap must be implemented so that some error.XXX methods work | ||||||
| @@ -84,6 +84,9 @@ func (ee *ExErr) FormatLog(lvl LogPrintLevel) string { | |||||||
| 	if lvl == LogPrintShort { | 	if lvl == LogPrintShort { | ||||||
|  |  | ||||||
| 		msg := ee.Message | 		msg := ee.Message | ||||||
|  | 		if msg == "" { | ||||||
|  | 			msg = ee.RecursiveMessage() | ||||||
|  | 		} | ||||||
| 		if ee.OriginalError != nil && ee.OriginalError.Category == CatForeign { | 		if ee.OriginalError != nil && ee.OriginalError.Category == CatForeign { | ||||||
| 			msg = msg + " (" + strings.ReplaceAll(ee.OriginalError.Message, "\n", " ") + ")" | 			msg = msg + " (" + strings.ReplaceAll(ee.OriginalError.Message, "\n", " ") + ")" | ||||||
| 		} | 		} | ||||||
| @@ -164,19 +167,37 @@ func (ee *ExErr) FormatLog(lvl LogPrintLevel) string { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (ee *ExErr) ShortLog(evt *zerolog.Event) { | func (ee *ExErr) ShortLog(evt *zerolog.Event) { | ||||||
| 	ee.Meta.Apply(evt).Msg(ee.FormatLog(LogPrintShort)) | 	ee.Meta.Apply(evt, langext.Ptr(240)).Msg(ee.FormatLog(LogPrintShort)) | ||||||
| } | } | ||||||
|  |  | ||||||
| // RecursiveMessage returns the message to show | // RecursiveMessage returns the message to show | ||||||
| // = first error (top-down) that is not wrapping/foreign/empty | // = first error (top-down) that is not wrapping/foreign/empty | ||||||
|  | // = lowest level error (that is not empty) | ||||||
|  | // = fallback to self.message | ||||||
| func (ee *ExErr) RecursiveMessage() string { | func (ee *ExErr) RecursiveMessage() string { | ||||||
|  |  | ||||||
|  | 	// ==== [1] ==== first error (top-down) that is not wrapping/foreign/empty | ||||||
|  |  | ||||||
| 	for curr := ee; curr != nil; curr = curr.OriginalError { | 	for curr := ee; curr != nil; curr = curr.OriginalError { | ||||||
| 		if curr.Message != "" && curr.Category != CatWrap && curr.Category != CatForeign { | 		if curr.Message != "" && curr.Category != CatWrap && curr.Category != CatForeign { | ||||||
| 			return curr.Message | 			return curr.Message | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	// fallback to self | 	// ==== [2] ==== lowest level error (that is not empty) | ||||||
|  |  | ||||||
|  | 	deepestMsg := "" | ||||||
|  | 	for curr := ee; curr != nil; curr = curr.OriginalError { | ||||||
|  | 		if curr.Message != "" { | ||||||
|  | 			deepestMsg = curr.Message | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	if deepestMsg != "" { | ||||||
|  | 		return deepestMsg | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	// ==== [3] ==== fallback to self.message | ||||||
|  |  | ||||||
| 	return ee.Message | 	return ee.Message | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -240,6 +261,73 @@ func (ee *ExErr) Depth() int { | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // GetMeta returns the meta value with the specified key | ||||||
|  | // this method recurses through all wrapped errors and returns the first matching meta value | ||||||
|  | func (ee *ExErr) GetMeta(key string) (any, bool) { | ||||||
|  | 	for curr := ee; curr != nil; curr = curr.OriginalError { | ||||||
|  | 		if v, ok := curr.Meta[key]; ok { | ||||||
|  | 			return v.Value, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil, false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // GetMetaString functions the same as GetMeta, but returns false if the type does not match | ||||||
|  | func (ee *ExErr) GetMetaString(key string) (string, bool) { | ||||||
|  | 	if v1, ok := ee.GetMeta(key); ok { | ||||||
|  | 		if v2, ok := v1.(string); ok { | ||||||
|  | 			return v2, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return "", false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (ee *ExErr) GetMetaBool(key string) (bool, bool) { | ||||||
|  | 	if v1, ok := ee.GetMeta(key); ok { | ||||||
|  | 		if v2, ok := v1.(bool); ok { | ||||||
|  | 			return v2, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return false, false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (ee *ExErr) GetMetaInt(key string) (int, bool) { | ||||||
|  | 	if v1, ok := ee.GetMeta(key); ok { | ||||||
|  | 		if v2, ok := v1.(int); ok { | ||||||
|  | 			return v2, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return 0, false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (ee *ExErr) GetMetaFloat32(key string) (float32, bool) { | ||||||
|  | 	if v1, ok := ee.GetMeta(key); ok { | ||||||
|  | 		if v2, ok := v1.(float32); ok { | ||||||
|  | 			return v2, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return 0, false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (ee *ExErr) GetMetaFloat64(key string) (float64, bool) { | ||||||
|  | 	if v1, ok := ee.GetMeta(key); ok { | ||||||
|  | 		if v2, ok := v1.(float64); ok { | ||||||
|  | 			return v2, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return 0, false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (ee *ExErr) GetMetaTime(key string) (time.Time, bool) { | ||||||
|  | 	if v1, ok := ee.GetMeta(key); ok { | ||||||
|  | 		if v2, ok := v1.(time.Time); ok { | ||||||
|  | 			return v2, true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return time.Time{}, false | ||||||
|  | } | ||||||
|  |  | ||||||
| // contains test if the supplied error is contained in this error (anywhere in the chain) | // contains test if the supplied error is contained in this error (anywhere in the chain) | ||||||
| func (ee *ExErr) contains(original *ExErr) (*ExErr, bool) { | func (ee *ExErr) contains(original *ExErr) (*ExErr, bool) { | ||||||
| 	if original == nil { | 	if original == nil { | ||||||
|   | |||||||
| @@ -2,10 +2,19 @@ package exerr | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"errors" | 	"errors" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"os" | ||||||
| 	"testing" | 	"testing" | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | func TestMain(m *testing.M) { | ||||||
|  | 	if !Initialized() { | ||||||
|  | 		Init(ErrorPackageConfigInit{ZeroLogErrTraces: langext.PFalse, ZeroLogAllTraces: langext.PFalse}) | ||||||
|  | 	} | ||||||
|  | 	os.Exit(m.Run()) | ||||||
|  | } | ||||||
|  |  | ||||||
| type golangErr struct { | type golangErr struct { | ||||||
| 	Message string | 	Message string | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										19
									
								
								exerr/gin.go
									
									
									
									
									
								
							
							
						
						
									
										19
									
								
								exerr/gin.go
									
									
									
									
									
								
							| @@ -15,10 +15,10 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la | |||||||
| 		ginJson["id"] = ee.UniqueID | 		ginJson["id"] = ee.UniqueID | ||||||
| 	} | 	} | ||||||
| 	if ee.Category != CatWrap { | 	if ee.Category != CatWrap { | ||||||
| 		ginJson["category"] = ee.Category | 		ginJson["category"] = ee.Category.Category | ||||||
| 	} | 	} | ||||||
| 	if ee.Type != TypeWrap { | 	if ee.Type != TypeWrap { | ||||||
| 		ginJson["type"] = ee.Type | 		ginJson["type"] = ee.Type.Key | ||||||
| 	} | 	} | ||||||
| 	if ee.StatusCode != nil { | 	if ee.StatusCode != nil { | ||||||
| 		ginJson["statuscode"] = ee.StatusCode | 		ginJson["statuscode"] = ee.StatusCode | ||||||
| @@ -30,7 +30,7 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la | |||||||
| 		ginJson["caller"] = ee.Caller | 		ginJson["caller"] = ee.Caller | ||||||
| 	} | 	} | ||||||
| 	if ee.Severity != SevErr { | 	if ee.Severity != SevErr { | ||||||
| 		ginJson["severity"] = ee.Severity | 		ginJson["severity"] = ee.Severity.Severity | ||||||
| 	} | 	} | ||||||
| 	if ee.Timestamp != (time.Time{}) { | 	if ee.Timestamp != (time.Time{}) { | ||||||
| 		ginJson["time"] = ee.Timestamp.Format(time.RFC3339) | 		ginJson["time"] = ee.Timestamp.Format(time.RFC3339) | ||||||
| @@ -57,6 +57,19 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la | |||||||
| 	return ginJson | 	return ginJson | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (ee *ExErr) ToDefaultAPIJson() (string, error) { | ||||||
|  |  | ||||||
|  | 	gjr := json.GoJsonRender{Data: ee.ToAPIJson(true, pkgconfig.ExtendedGinOutput, pkgconfig.IncludeMetaInGinOutput), NilSafeSlices: true, NilSafeMaps: true} | ||||||
|  |  | ||||||
|  | 	r, err := gjr.RenderString() | ||||||
|  |  | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
| // ToAPIJson converts the ExError to a json object | // ToAPIJson converts the ExError to a json object | ||||||
| // (the same object as used in the Output(gin) method) | // (the same object as used in the Output(gin) method) | ||||||
| // | // | ||||||
|   | |||||||
| @@ -4,15 +4,6 @@ import ( | |||||||
| 	"sync" | 	"sync" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type Method string |  | ||||||
|  |  | ||||||
| const ( |  | ||||||
| 	MethodOutput Method = "OUTPUT" |  | ||||||
| 	MethodPrint  Method = "PRINT" |  | ||||||
| 	MethodBuild  Method = "BUILD" |  | ||||||
| 	MethodFatal  Method = "FATAL" |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| type Listener = func(method Method, v *ExErr) | type Listener = func(method Method, v *ExErr) | ||||||
|  |  | ||||||
| var listenerLock = sync.Mutex{} | var listenerLock = sync.Mutex{} | ||||||
| @@ -25,13 +16,11 @@ func RegisterListener(l Listener) { | |||||||
| 	listener = append(listener, l) | 	listener = append(listener, l) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (b *Builder) CallListener(m Method) { | func (ee *ExErr) CallListener(m Method) { | ||||||
| 	valErr := b.errorData |  | ||||||
|  |  | ||||||
| 	listenerLock.Lock() | 	listenerLock.Lock() | ||||||
| 	defer listenerLock.Unlock() | 	defer listenerLock.Unlock() | ||||||
|  |  | ||||||
| 	for _, v := range listener { | 	for _, v := range listener { | ||||||
| 		v(m, valErr) | 		v(m, ee) | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|   | |||||||
| @@ -9,6 +9,7 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/bson" | 	"go.mongodb.org/mongo-driver/bson" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/primitive" | 	"go.mongodb.org/mongo-driver/bson/primitive" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"math" | ||||||
| 	"strconv" | 	"strconv" | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"time" | 	"time" | ||||||
| @@ -217,23 +218,35 @@ func (v MetaValue) ShortString(lim int) string { | |||||||
| 	return "(err)" | 	return "(err)" | ||||||
| } | } | ||||||
|  |  | ||||||
| func (v MetaValue) Apply(key string, evt *zerolog.Event) *zerolog.Event { | func (v MetaValue) Apply(key string, evt *zerolog.Event, limitLen *int) *zerolog.Event { | ||||||
| 	switch v.DataType { | 	switch v.DataType { | ||||||
| 	case MDTString: | 	case MDTString: | ||||||
|  | 		if limitLen == nil { | ||||||
| 			return evt.Str(key, v.Value.(string)) | 			return evt.Str(key, v.Value.(string)) | ||||||
|  | 		} else { | ||||||
|  | 			return evt.Str(key, langext.StrLimit(v.Value.(string), *limitLen, "...")) | ||||||
|  | 		} | ||||||
| 	case MDTID: | 	case MDTID: | ||||||
| 		return evt.Str(key, v.Value.(IDWrap).Value) | 		return evt.Str(key, v.Value.(IDWrap).Value) | ||||||
| 	case MDTAny: | 	case MDTAny: | ||||||
| 		if v.Value.(AnyWrap).IsError { | 		if v.Value.(AnyWrap).IsError { | ||||||
| 			return evt.Str(key, "(err)") | 			return evt.Str(key, "(err)") | ||||||
| 		} else { | 		} else { | ||||||
|  | 			if limitLen == nil { | ||||||
| 				return evt.Str(key, v.Value.(AnyWrap).Json) | 				return evt.Str(key, v.Value.(AnyWrap).Json) | ||||||
|  | 			} else { | ||||||
|  | 				return evt.Str(key, langext.StrLimit(v.Value.(AnyWrap).Json, *limitLen, "...")) | ||||||
|  | 			} | ||||||
| 		} | 		} | ||||||
| 	case MDTStringPtr: | 	case MDTStringPtr: | ||||||
| 		if langext.IsNil(v.Value) { | 		if langext.IsNil(v.Value) { | ||||||
| 			return evt.Str(key, "<<null>>") | 			return evt.Str(key, "<<null>>") | ||||||
| 		} | 		} | ||||||
|  | 		if limitLen == nil { | ||||||
| 			return evt.Str(key, langext.CoalesceString(v.Value.(*string), "<<null>>")) | 			return evt.Str(key, langext.CoalesceString(v.Value.(*string), "<<null>>")) | ||||||
|  | 		} else { | ||||||
|  | 			return evt.Str(key, langext.StrLimit(langext.CoalesceString(v.Value.(*string), "<<null>>"), *limitLen, "...")) | ||||||
|  | 		} | ||||||
| 	case MDTInt: | 	case MDTInt: | ||||||
| 		return evt.Int(key, v.Value.(int)) | 		return evt.Int(key, v.Value.(int)) | ||||||
| 	case MDTInt8: | 	case MDTInt8: | ||||||
| @@ -612,7 +625,18 @@ func (v MetaValue) rawValueForJson() any { | |||||||
| 		if v.Value.(AnyWrap).IsNil { | 		if v.Value.(AnyWrap).IsNil { | ||||||
| 			return nil | 			return nil | ||||||
| 		} | 		} | ||||||
| 		return v.Value.(AnyWrap).Serialize() | 		if v.Value.(AnyWrap).IsError { | ||||||
|  | 			return bson.M{"@error": true} | ||||||
|  | 		} | ||||||
|  | 		jsonobj := primitive.M{} | ||||||
|  | 		jsonarr := primitive.A{} | ||||||
|  | 		if err := json.Unmarshal([]byte(v.Value.(AnyWrap).Json), &jsonobj); err == nil { | ||||||
|  | 			return jsonobj | ||||||
|  | 		} else if err := json.Unmarshal([]byte(v.Value.(AnyWrap).Json), &jsonarr); err == nil { | ||||||
|  | 			return jsonarr | ||||||
|  | 		} else { | ||||||
|  | 			return bson.M{"type": v.Value.(AnyWrap).Type, "data": v.Value.(AnyWrap).Json} | ||||||
|  | 		} | ||||||
| 	} | 	} | ||||||
| 	if v.DataType == MDTID { | 	if v.DataType == MDTID { | ||||||
| 		if v.Value.(IDWrap).IsNil { | 		if v.Value.(IDWrap).IsNil { | ||||||
| @@ -644,6 +668,28 @@ func (v MetaValue) rawValueForJson() any { | |||||||
| 		} | 		} | ||||||
| 		return v.Value.(EnumWrap).ValueString | 		return v.Value.(EnumWrap).ValueString | ||||||
| 	} | 	} | ||||||
|  | 	if v.DataType == MDTFloat32 { | ||||||
|  | 		if math.IsNaN(float64(v.Value.(float32))) { | ||||||
|  | 			return "float64::NaN" | ||||||
|  | 		} else if math.IsInf(float64(v.Value.(float32)), +1) { | ||||||
|  | 			return "float64::+inf" | ||||||
|  | 		} else if math.IsInf(float64(v.Value.(float32)), -1) { | ||||||
|  | 			return "float64::-inf" | ||||||
|  | 		} else { | ||||||
|  | 			return v.Value | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	if v.DataType == MDTFloat64 { | ||||||
|  | 		if math.IsNaN(v.Value.(float64)) { | ||||||
|  | 			return "float64::NaN" | ||||||
|  | 		} else if math.IsInf(v.Value.(float64), +1) { | ||||||
|  | 			return "float64::+inf" | ||||||
|  | 		} else if math.IsInf(v.Value.(float64), -1) { | ||||||
|  | 			return "float64::-inf" | ||||||
|  | 		} else { | ||||||
|  | 			return v.Value | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
| 	return v.Value | 	return v.Value | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -691,9 +737,9 @@ func (mm MetaMap) Any() bool { | |||||||
| 	return len(mm) > 0 | 	return len(mm) > 0 | ||||||
| } | } | ||||||
|  |  | ||||||
| func (mm MetaMap) Apply(evt *zerolog.Event) *zerolog.Event { | func (mm MetaMap) Apply(evt *zerolog.Event, limitLen *int) *zerolog.Event { | ||||||
| 	for key, val := range mm { | 	for key, val := range mm { | ||||||
| 		evt = val.Apply(key, evt) | 		evt = val.Apply(key, evt, limitLen) | ||||||
| 	} | 	} | ||||||
| 	return evt | 	return evt | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										36
									
								
								fsext/exists.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										36
									
								
								fsext/exists.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,36 @@ | |||||||
|  | package fsext | ||||||
|  |  | ||||||
|  | import "os" | ||||||
|  |  | ||||||
|  | func PathExists(fp string) (bool, error) { | ||||||
|  | 	_, err := os.Stat(fp) | ||||||
|  | 	if err == nil { | ||||||
|  | 		return true, nil | ||||||
|  | 	} | ||||||
|  | 	if os.IsNotExist(err) { | ||||||
|  | 		return false, nil | ||||||
|  | 	} | ||||||
|  | 	return false, err | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func FileExists(fp string) (bool, error) { | ||||||
|  | 	stat, err := os.Stat(fp) | ||||||
|  | 	if err == nil { | ||||||
|  | 		return !stat.IsDir(), nil | ||||||
|  | 	} | ||||||
|  | 	if os.IsNotExist(err) { | ||||||
|  | 		return false, nil | ||||||
|  | 	} | ||||||
|  | 	return false, err | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func DirectoryExists(fp string) (bool, error) { | ||||||
|  | 	stat, err := os.Stat(fp) | ||||||
|  | 	if err == nil { | ||||||
|  | 		return stat.IsDir(), nil | ||||||
|  | 	} | ||||||
|  | 	if os.IsNotExist(err) { | ||||||
|  | 		return false, nil | ||||||
|  | 	} | ||||||
|  | 	return false, err | ||||||
|  | } | ||||||
							
								
								
									
										121
									
								
								ginext/engine.go
									
									
									
									
									
								
							
							
						
						
									
										121
									
								
								ginext/engine.go
									
									
									
									
									
								
							| @@ -6,20 +6,26 @@ import ( | |||||||
| 	"github.com/rs/zerolog/log" | 	"github.com/rs/zerolog/log" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/mathext" | 	"gogs.mikescher.com/BlackForestBytes/goext/mathext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
| 	"net" | 	"net" | ||||||
| 	"net/http" | 	"net/http" | ||||||
|  | 	"net/http/httptest" | ||||||
|  | 	"regexp" | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type GinWrapper struct { | type GinWrapper struct { | ||||||
| 	engine          *gin.Engine | 	engine          *gin.Engine | ||||||
| 	SuppressGinLogs bool | 	suppressGinLogs bool | ||||||
|  |  | ||||||
|  | 	opt                   Options | ||||||
| 	allowCors             bool | 	allowCors             bool | ||||||
| 	ginDebug              bool | 	ginDebug              bool | ||||||
| 	bufferBody            bool | 	bufferBody            bool | ||||||
| 	requestTimeout        time.Duration | 	requestTimeout        time.Duration | ||||||
|  | 	listenerBeforeRequest []func(g *gin.Context) | ||||||
|  | 	listenerAfterRequest  []func(g *gin.Context, resp HTTPResponse) | ||||||
|  |  | ||||||
| 	routeSpecs []ginRouteSpec | 	routeSpecs []ginRouteSpec | ||||||
| } | } | ||||||
| @@ -31,45 +37,57 @@ type ginRouteSpec struct { | |||||||
| 	Handler     string | 	Handler     string | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type Options struct { | ||||||
|  | 	AllowCors                *bool                                     // Add cors handler to allow all CORS requests on the default http methods | ||||||
|  | 	GinDebug                 *bool                                     // Set gin.debug to true (adds more logs) | ||||||
|  | 	SuppressGinLogs          *bool                                     // Suppress our custom gin logs (even if GinDebug == true) | ||||||
|  | 	BufferBody               *bool                                     // Buffers the input body stream, this way the ginext error handler can later include the whole request body | ||||||
|  | 	Timeout                  *time.Duration                            // The default handler timeout | ||||||
|  | 	ListenerBeforeRequest    []func(g *gin.Context)                    // Register listener that are called before the handler method | ||||||
|  | 	ListenerAfterRequest     []func(g *gin.Context, resp HTTPResponse) // Register listener that are called after the handler method | ||||||
|  | 	DebugTrimHandlerPrefixes []string                                  // Trim these prefixes from the handler names in the debug print | ||||||
|  | 	DebugReplaceHandlerNames map[string]string                         // Replace handler names in debug output | ||||||
|  | } | ||||||
|  |  | ||||||
| // NewEngine creates a new (wrapped) ginEngine | // NewEngine creates a new (wrapped) ginEngine | ||||||
| // Parameters are: | func NewEngine(opt Options) *GinWrapper { | ||||||
| // - [allowCors]    Add cors handler to allow all CORS requests on the default http methods |  | ||||||
| // - [ginDebug]     Set gin.debug to true (adds more logs) |  | ||||||
| // - [bufferBody]   Buffers the input body stream, this way the ginext error handler can later include the whole request body |  | ||||||
| // - [timeout]      The default handler timeout |  | ||||||
| func NewEngine(allowCors bool, ginDebug bool, bufferBody bool, timeout time.Duration) *GinWrapper { |  | ||||||
| 	engine := gin.New() | 	engine := gin.New() | ||||||
|  |  | ||||||
| 	wrapper := &GinWrapper{ | 	wrapper := &GinWrapper{ | ||||||
| 		engine:                engine, | 		engine:                engine, | ||||||
| 		SuppressGinLogs: false, | 		opt:                   opt, | ||||||
| 		allowCors:       allowCors, | 		suppressGinLogs:       langext.Coalesce(opt.SuppressGinLogs, false), | ||||||
| 		ginDebug:        ginDebug, | 		allowCors:             langext.Coalesce(opt.AllowCors, false), | ||||||
| 		bufferBody:      bufferBody, | 		ginDebug:              langext.Coalesce(opt.GinDebug, true), | ||||||
| 		requestTimeout:  timeout, | 		bufferBody:            langext.Coalesce(opt.BufferBody, false), | ||||||
|  | 		requestTimeout:        langext.Coalesce(opt.Timeout, 24*time.Hour), | ||||||
|  | 		listenerBeforeRequest: opt.ListenerBeforeRequest, | ||||||
|  | 		listenerAfterRequest:  opt.ListenerAfterRequest, | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	engine.RedirectFixedPath = false | 	engine.RedirectFixedPath = false | ||||||
| 	engine.RedirectTrailingSlash = false | 	engine.RedirectTrailingSlash = false | ||||||
|  |  | ||||||
| 	if allowCors { | 	if wrapper.allowCors { | ||||||
| 		engine.Use(CorsMiddleware()) | 		engine.Use(CorsMiddleware()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	if wrapper.ginDebug { | ||||||
|  | 		gin.SetMode(gin.DebugMode) | ||||||
|  |  | ||||||
| 		// do not debug-print routes | 		// do not debug-print routes | ||||||
| 		gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {} | 		gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {} | ||||||
|  |  | ||||||
| 	if ginDebug { | 		if !wrapper.suppressGinLogs { | ||||||
|  | 			ginlogger := gin.Logger() | ||||||
|  | 			engine.Use(func(context *gin.Context) { ginlogger(context) }) | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
| 		gin.SetMode(gin.ReleaseMode) | 		gin.SetMode(gin.ReleaseMode) | ||||||
|  |  | ||||||
| 		ginlogger := gin.Logger() | 		// do not debug-print routes | ||||||
| 		engine.Use(func(context *gin.Context) { | 		gin.DebugPrintRouteFunc = func(_, _, _ string, _ int) {} | ||||||
| 			if !wrapper.SuppressGinLogs { |  | ||||||
| 				ginlogger(context) |  | ||||||
| 			} |  | ||||||
| 		}) |  | ||||||
| 	} else { |  | ||||||
| 		gin.SetMode(gin.DebugMode) |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return wrapper | 	return wrapper | ||||||
| @@ -126,8 +144,8 @@ func (w *GinWrapper) DebugPrintRoutes() { | |||||||
| 		line := [4]string{ | 		line := [4]string{ | ||||||
| 			spec.Method, | 			spec.Method, | ||||||
| 			spec.URL, | 			spec.URL, | ||||||
| 			strings.Join(spec.Middlewares, " -> "), | 			strings.Join(langext.ArrMap(spec.Middlewares, w.cleanMiddlewareName), " -> "), | ||||||
| 			spec.Handler, | 			w.cleanMiddlewareName(spec.Handler), | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		lines = append(lines, line) | 		lines = append(lines, line) | ||||||
| @@ -138,12 +156,65 @@ func (w *GinWrapper) DebugPrintRoutes() { | |||||||
| 		pad[3] = mathext.Max(pad[3], len(line[3])) | 		pad[3] = mathext.Max(pad[3], len(line[3])) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	fmt.Printf("Gin-Routes:\n") | ||||||
|  | 	fmt.Printf("{\n") | ||||||
| 	for _, line := range lines { | 	for _, line := range lines { | ||||||
|  |  | ||||||
| 		fmt.Printf("Gin-Route: %s  %s  -->  %s  -->  %s\n", | 		fmt.Printf(" %s  %s  -->  %s  -->  %s\n", | ||||||
| 			langext.StrPadRight("["+line[0]+"]", " ", pad[0]+2), | 			langext.StrPadRight("["+line[0]+"]", " ", pad[0]+2), | ||||||
| 			langext.StrPadRight(line[1], " ", pad[1]), | 			langext.StrPadRight(line[1], " ", pad[1]), | ||||||
| 			langext.StrPadRight(line[2], " ", pad[2]), | 			langext.StrPadRight(line[2], " ", pad[2]), | ||||||
| 			langext.StrPadRight(line[3], " ", pad[3])) | 			langext.StrPadRight(line[3], " ", pad[3])) | ||||||
| 	} | 	} | ||||||
|  | 	fmt.Printf("}\n") | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (w *GinWrapper) cleanMiddlewareName(fname string) string { | ||||||
|  |  | ||||||
|  | 	funcSuffix := rext.W(regexp.MustCompile(`\.func[0-9]+(?:\.[0-9]+)*$`)) | ||||||
|  | 	if match, ok := funcSuffix.MatchFirst(fname); ok { | ||||||
|  | 		fname = fname[:len(fname)-match.FullMatch().Length()] | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if strings.HasSuffix(fname, ".(*GinRoutesWrapper).WithJSONFilter") { | ||||||
|  | 		fname = "[JSONFilter]" | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if fname == "ginext.BodyBuffer" { | ||||||
|  | 		fname = "[BodyBuffer]" | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	skipPrefixes := []string{"api.(*Handler).", "api.", "ginext.", "handler.", "admin-app.", "employee-app.", "employer-app."} | ||||||
|  | 	for _, pfx := range skipPrefixes { | ||||||
|  | 		if strings.HasPrefix(fname, pfx) { | ||||||
|  | 			fname = fname[len(pfx):] | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for _, pfx := range w.opt.DebugTrimHandlerPrefixes { | ||||||
|  | 		if strings.HasPrefix(fname, pfx) { | ||||||
|  | 			fname = fname[len(pfx):] | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for k, v := range langext.ForceMap(w.opt.DebugReplaceHandlerNames) { | ||||||
|  | 		if strings.EqualFold(fname, k) { | ||||||
|  | 			fname = v | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return fname | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // ServeHTTP only used for unit tests | ||||||
|  | func (w *GinWrapper) ServeHTTP(req *http.Request) *httptest.ResponseRecorder { | ||||||
|  | 	respRec := httptest.NewRecorder() | ||||||
|  | 	w.engine.ServeHTTP(respRec, req) | ||||||
|  | 	return respRec | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // ForwardRequest manually inserts a request into this router | ||||||
|  | // = behaves as if the request came from the outside (and writes the response to `writer`) | ||||||
|  | func (w *GinWrapper) ForwardRequest(writer http.ResponseWriter, req *http.Request) { | ||||||
|  | 	w.engine.ServeHTTP(writer, req) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -14,7 +14,17 @@ func Wrap(w *GinWrapper, fn WHandlerFunc) gin.HandlerFunc { | |||||||
|  |  | ||||||
| 		reqctx := g.Request.Context() | 		reqctx := g.Request.Context() | ||||||
|  |  | ||||||
| 		wrap, stackTrace, panicObj := callPanicSafe(fn, PreContext{wrapper: w, ginCtx: g}) | 		pctx := PreContext{ | ||||||
|  | 			wrapper:        w, | ||||||
|  | 			ginCtx:         g, | ||||||
|  | 			persistantData: &preContextData{}, | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		for _, lstr := range w.listenerBeforeRequest { | ||||||
|  | 			lstr(g) | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		wrap, stackTrace, panicObj := callPanicSafe(fn, pctx) | ||||||
| 		if panicObj != nil { | 		if panicObj != nil { | ||||||
|  |  | ||||||
| 			fmt.Printf("\n======== ======== STACKTRACE ======== ========\n%s\n======== ======== ======== ========\n\n", stackTrace) | 			fmt.Printf("\n======== ======== STACKTRACE ======== ========\n%s\n======== ======== ======== ========\n\n", stackTrace) | ||||||
| @@ -32,6 +42,17 @@ func Wrap(w *GinWrapper, fn WHandlerFunc) gin.HandlerFunc { | |||||||
| 			panic("Writing in WrapperFunc is not supported") | 			panic("Writing in WrapperFunc is not supported") | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
|  | 		if pctx.persistantData.sessionObj != nil { | ||||||
|  | 			err := pctx.persistantData.sessionObj.Finish(reqctx, wrap) | ||||||
|  | 			if err != nil { | ||||||
|  | 				wrap = Error(exerr.Wrap(err, "Failed to finish session").Any("originalResponse", wrap).Build()) | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		for _, lstr := range w.listenerAfterRequest { | ||||||
|  | 			lstr(g, wrap) | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		if reqctx.Err() == nil { | 		if reqctx.Err() == nil { | ||||||
| 			wrap.Write(g) | 			wrap.Write(g) | ||||||
| 		} | 		} | ||||||
|   | |||||||
| @@ -1,12 +1,15 @@ | |||||||
| package ginext | package ginext | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"bytes" | ||||||
| 	"context" | 	"context" | ||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
| 	"github.com/gin-gonic/gin/binding" | 	"github.com/gin-gonic/gin/binding" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/dataext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"io" | ||||||
| 	"runtime/debug" | 	"runtime/debug" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -17,9 +20,15 @@ type PreContext struct { | |||||||
| 	uri            any | 	uri            any | ||||||
| 	query          any | 	query          any | ||||||
| 	body           any | 	body           any | ||||||
|  | 	rawbody        *[]byte | ||||||
| 	form           any | 	form           any | ||||||
| 	header         any | 	header         any | ||||||
| 	timeout        *time.Duration | 	timeout        *time.Duration | ||||||
|  | 	persistantData *preContextData // must be a ptr, so that we can get the values back in out Wrap func | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type preContextData struct { | ||||||
|  | 	sessionObj SessionObject | ||||||
| } | } | ||||||
|  |  | ||||||
| func (pctx *PreContext) URI(uri any) *PreContext { | func (pctx *PreContext) URI(uri any) *PreContext { | ||||||
| @@ -37,6 +46,11 @@ func (pctx *PreContext) Body(body any) *PreContext { | |||||||
| 	return pctx | 	return pctx | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (pctx *PreContext) RawBody(rawbody *[]byte) *PreContext { | ||||||
|  | 	pctx.rawbody = rawbody | ||||||
|  | 	return pctx | ||||||
|  | } | ||||||
|  |  | ||||||
| func (pctx *PreContext) Form(form any) *PreContext { | func (pctx *PreContext) Form(form any) *PreContext { | ||||||
| 	pctx.form = form | 	pctx.form = form | ||||||
| 	return pctx | 	return pctx | ||||||
| @@ -52,6 +66,11 @@ func (pctx *PreContext) WithTimeout(to time.Duration) *PreContext { | |||||||
| 	return pctx | 	return pctx | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (pctx *PreContext) WithSession(sessionObj SessionObject) *PreContext { | ||||||
|  | 	pctx.persistantData.sessionObj = sessionObj | ||||||
|  | 	return pctx | ||||||
|  | } | ||||||
|  |  | ||||||
| func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | ||||||
| 	if pctx.uri != nil { | 	if pctx.uri != nil { | ||||||
| 		if err := pctx.ginCtx.ShouldBindUri(pctx.uri); err != nil { | 		if err := pctx.ginCtx.ShouldBindUri(pctx.uri); err != nil { | ||||||
| @@ -90,6 +109,23 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | |||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	if pctx.rawbody != nil { | ||||||
|  | 		if brc, ok := pctx.ginCtx.Request.Body.(dataext.BufferedReadCloser); ok { | ||||||
|  | 			v, err := brc.BufferedAll() | ||||||
|  | 			if err != nil { | ||||||
|  | 				return nil, nil, langext.Ptr(Error(err)) | ||||||
|  | 			} | ||||||
|  | 			*pctx.rawbody = v | ||||||
|  | 		} else { | ||||||
|  | 			buf := &bytes.Buffer{} | ||||||
|  | 			_, err := io.Copy(buf, pctx.ginCtx.Request.Body) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return nil, nil, langext.Ptr(Error(err)) | ||||||
|  | 			} | ||||||
|  | 			*pctx.rawbody = buf.Bytes() | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	if pctx.form != nil { | 	if pctx.form != nil { | ||||||
| 		if pctx.ginCtx.ContentType() == "multipart/form-data" { | 		if pctx.ginCtx.ContentType() == "multipart/form-data" { | ||||||
| 			if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil { | 			if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil { | ||||||
| @@ -116,7 +152,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if pctx.header != nil { | 	if pctx.header != nil { | ||||||
| 		if err := pctx.ginCtx.ShouldBindHeader(pctx.query); err != nil { | 		if err := pctx.ginCtx.ShouldBindHeader(pctx.header); err != nil { | ||||||
| 			err = exerr.Wrap(err, "Failed to read header"). | 			err = exerr.Wrap(err, "Failed to read header"). | ||||||
| 				WithType(exerr.TypeBindFailHeader). | 				WithType(exerr.TypeBindFailHeader). | ||||||
| 				Str("struct_type", fmt.Sprintf("%T", pctx.query)). | 				Str("struct_type", fmt.Sprintf("%T", pctx.query)). | ||||||
| @@ -126,8 +162,17 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	ictx, cancel := context.WithTimeout(context.Background(), langext.Coalesce(pctx.timeout, pctx.wrapper.requestTimeout)) | 	ictx, cancel := context.WithTimeout(context.Background(), langext.Coalesce(pctx.timeout, pctx.wrapper.requestTimeout)) | ||||||
|  |  | ||||||
| 	actx := CreateAppContext(pctx.ginCtx, ictx, cancel) | 	actx := CreateAppContext(pctx.ginCtx, ictx, cancel) | ||||||
|  |  | ||||||
|  | 	if pctx.persistantData.sessionObj != nil { | ||||||
|  | 		err := pctx.persistantData.sessionObj.Init(pctx.ginCtx, actx) | ||||||
|  | 		if err != nil { | ||||||
|  | 			actx.Cancel() | ||||||
|  | 			return nil, nil, langext.Ptr(Error(exerr.Wrap(err, "Failed to init session").Build())) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	return actx, pctx.ginCtx, nil | 	return actx, pctx.ginCtx, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,12 +1,20 @@ | |||||||
| package ginext | package ginext | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"fmt" |  | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	json "gogs.mikescher.com/BlackForestBytes/goext/gojson" |  | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | type cookieval struct { | ||||||
|  | 	name     string | ||||||
|  | 	value    string | ||||||
|  | 	maxAge   int | ||||||
|  | 	path     string | ||||||
|  | 	domain   string | ||||||
|  | 	secure   bool | ||||||
|  | 	httpOnly bool | ||||||
|  | } | ||||||
|  |  | ||||||
| type headerval struct { | type headerval struct { | ||||||
| 	Key string | 	Key string | ||||||
| 	Val string | 	Val string | ||||||
| @@ -15,200 +23,17 @@ type headerval struct { | |||||||
| type HTTPResponse interface { | type HTTPResponse interface { | ||||||
| 	Write(g *gin.Context) | 	Write(g *gin.Context) | ||||||
| 	WithHeader(k string, v string) HTTPResponse | 	WithHeader(k string, v string) HTTPResponse | ||||||
|  | 	WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse | ||||||
|  | 	IsSuccess() bool | ||||||
| } | } | ||||||
|  |  | ||||||
| type jsonHTTPResponse struct { | type InspectableHTTPResponse interface { | ||||||
| 	statusCode int | 	HTTPResponse | ||||||
| 	data       any |  | ||||||
| 	headers    []headerval |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j jsonHTTPResponse) Write(g *gin.Context) { | 	Statuscode() int | ||||||
| 	for _, v := range j.headers { | 	BodyString(g *gin.Context) *string | ||||||
| 		g.Header(v.Key, v.Val) | 	ContentType() string | ||||||
| 	} | 	Headers() []string | ||||||
| 	g.Render(j.statusCode, json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true}) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse { |  | ||||||
| 	j.headers = append(j.headers, headerval{k, v}) |  | ||||||
| 	return j |  | ||||||
| } |  | ||||||
|  |  | ||||||
| type emptyHTTPResponse struct { |  | ||||||
| 	statusCode int |  | ||||||
| 	headers    []headerval |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j emptyHTTPResponse) Write(g *gin.Context) { |  | ||||||
| 	for _, v := range j.headers { |  | ||||||
| 		g.Header(v.Key, v.Val) |  | ||||||
| 	} |  | ||||||
| 	g.Status(j.statusCode) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse { |  | ||||||
| 	j.headers = append(j.headers, headerval{k, v}) |  | ||||||
| 	return j |  | ||||||
| } |  | ||||||
|  |  | ||||||
| type textHTTPResponse struct { |  | ||||||
| 	statusCode int |  | ||||||
| 	data       string |  | ||||||
| 	headers    []headerval |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j textHTTPResponse) Write(g *gin.Context) { |  | ||||||
| 	for _, v := range j.headers { |  | ||||||
| 		g.Header(v.Key, v.Val) |  | ||||||
| 	} |  | ||||||
| 	g.String(j.statusCode, "%s", j.data) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse { |  | ||||||
| 	j.headers = append(j.headers, headerval{k, v}) |  | ||||||
| 	return j |  | ||||||
| } |  | ||||||
|  |  | ||||||
| type dataHTTPResponse struct { |  | ||||||
| 	statusCode  int |  | ||||||
| 	data        []byte |  | ||||||
| 	contentType string |  | ||||||
| 	headers     []headerval |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j dataHTTPResponse) Write(g *gin.Context) { |  | ||||||
| 	for _, v := range j.headers { |  | ||||||
| 		g.Header(v.Key, v.Val) |  | ||||||
| 	} |  | ||||||
| 	g.Data(j.statusCode, j.contentType, j.data) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse { |  | ||||||
| 	j.headers = append(j.headers, headerval{k, v}) |  | ||||||
| 	return j |  | ||||||
| } |  | ||||||
|  |  | ||||||
| type fileHTTPResponse struct { |  | ||||||
| 	mimetype string |  | ||||||
| 	filepath string |  | ||||||
| 	filename *string |  | ||||||
| 	headers  []headerval |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j fileHTTPResponse) Write(g *gin.Context) { |  | ||||||
| 	g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later... |  | ||||||
| 	if j.filename != nil { |  | ||||||
| 		g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename)) |  | ||||||
|  |  | ||||||
| 	} |  | ||||||
| 	for _, v := range j.headers { |  | ||||||
| 		g.Header(v.Key, v.Val) |  | ||||||
| 	} |  | ||||||
| 	g.File(j.filepath) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse { |  | ||||||
| 	j.headers = append(j.headers, headerval{k, v}) |  | ||||||
| 	return j |  | ||||||
| } |  | ||||||
|  |  | ||||||
| type downloadDataHTTPResponse struct { |  | ||||||
| 	statusCode int |  | ||||||
| 	mimetype   string |  | ||||||
| 	data       []byte |  | ||||||
| 	filename   *string |  | ||||||
| 	headers    []headerval |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j downloadDataHTTPResponse) Write(g *gin.Context) { |  | ||||||
| 	g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later... |  | ||||||
| 	if j.filename != nil { |  | ||||||
| 		g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename)) |  | ||||||
|  |  | ||||||
| 	} |  | ||||||
| 	for _, v := range j.headers { |  | ||||||
| 		g.Header(v.Key, v.Val) |  | ||||||
| 	} |  | ||||||
| 	g.Data(j.statusCode, j.mimetype, j.data) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse { |  | ||||||
| 	j.headers = append(j.headers, headerval{k, v}) |  | ||||||
| 	return j |  | ||||||
| } |  | ||||||
|  |  | ||||||
| type redirectHTTPResponse struct { |  | ||||||
| 	statusCode int |  | ||||||
| 	url        string |  | ||||||
| 	headers    []headerval |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j redirectHTTPResponse) Write(g *gin.Context) { |  | ||||||
| 	g.Redirect(j.statusCode, j.url) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse { |  | ||||||
| 	j.headers = append(j.headers, headerval{k, v}) |  | ||||||
| 	return j |  | ||||||
| } |  | ||||||
|  |  | ||||||
| type jsonAPIErrResponse struct { |  | ||||||
| 	err     *exerr.ExErr |  | ||||||
| 	headers []headerval |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j jsonAPIErrResponse) Write(g *gin.Context) { |  | ||||||
| 	j.err.Output(g) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse { |  | ||||||
| 	j.headers = append(j.headers, headerval{k, v}) |  | ||||||
| 	return j |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func Status(sc int) HTTPResponse { |  | ||||||
| 	return &emptyHTTPResponse{statusCode: sc} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func JSON(sc int, data any) HTTPResponse { |  | ||||||
| 	return &jsonHTTPResponse{statusCode: sc, data: data} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func Data(sc int, contentType string, data []byte) HTTPResponse { |  | ||||||
| 	return &dataHTTPResponse{statusCode: sc, contentType: contentType, data: data} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func Text(sc int, data string) HTTPResponse { |  | ||||||
| 	return &textHTTPResponse{statusCode: sc, data: data} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func File(mimetype string, filepath string) HTTPResponse { |  | ||||||
| 	return &fileHTTPResponse{mimetype: mimetype, filepath: filepath} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func Download(mimetype string, filepath string, filename string) HTTPResponse { |  | ||||||
| 	return &fileHTTPResponse{mimetype: mimetype, filepath: filepath, filename: &filename} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func DownloadData(status int, mimetype string, filename string, data []byte) HTTPResponse { |  | ||||||
| 	return &downloadDataHTTPResponse{statusCode: status, mimetype: mimetype, data: data, filename: &filename} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func Redirect(sc int, newURL string) HTTPResponse { |  | ||||||
| 	return &redirectHTTPResponse{statusCode: sc, url: newURL} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func Error(e error) HTTPResponse { |  | ||||||
| 	return &jsonAPIErrResponse{ |  | ||||||
| 		err: exerr.FromError(e), |  | ||||||
| 	} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func ErrWrap(e error, errorType exerr.ErrorType, msg string) HTTPResponse { |  | ||||||
| 	return &jsonAPIErrResponse{ |  | ||||||
| 		err: exerr.FromError(exerr.Wrap(e, msg).WithType(errorType).Build()), |  | ||||||
| 	} |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func NotImplemented() HTTPResponse { | func NotImplemented() HTTPResponse { | ||||||
|   | |||||||
							
								
								
									
										58
									
								
								ginext/responseData.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										58
									
								
								ginext/responseData.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,58 @@ | |||||||
|  | package ginext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type dataHTTPResponse struct { | ||||||
|  | 	statusCode  int | ||||||
|  | 	data        []byte | ||||||
|  | 	contentType string | ||||||
|  | 	headers     []headerval | ||||||
|  | 	cookies     []cookieval | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) Write(g *gin.Context) { | ||||||
|  | 	for _, v := range j.headers { | ||||||
|  | 		g.Header(v.Key, v.Val) | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
|  | 	g.Data(j.statusCode, j.contentType, j.data) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||||
|  | 	j.headers = append(j.headers, headerval{k, v}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) IsSuccess() bool { | ||||||
|  | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) Statuscode() int { | ||||||
|  | 	return j.statusCode | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	return langext.Ptr(string(j.data)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) ContentType() string { | ||||||
|  | 	return j.contentType | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func Data(sc int, contentType string, data []byte) HTTPResponse { | ||||||
|  | 	return &dataHTTPResponse{statusCode: sc, contentType: contentType, data: data} | ||||||
|  | } | ||||||
							
								
								
									
										64
									
								
								ginext/responseDownload.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										64
									
								
								ginext/responseDownload.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,64 @@ | |||||||
|  | package ginext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"fmt" | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type downloadDataHTTPResponse struct { | ||||||
|  | 	statusCode int | ||||||
|  | 	mimetype   string | ||||||
|  | 	data       []byte | ||||||
|  | 	filename   *string | ||||||
|  | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) Write(g *gin.Context) { | ||||||
|  | 	g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later... | ||||||
|  | 	if j.filename != nil { | ||||||
|  | 		g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename)) | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.headers { | ||||||
|  | 		g.Header(v.Key, v.Val) | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
|  | 	g.Data(j.statusCode, j.mimetype, j.data) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||||
|  | 	j.headers = append(j.headers, headerval{k, v}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) IsSuccess() bool { | ||||||
|  | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) Statuscode() int { | ||||||
|  | 	return j.statusCode | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	return langext.Ptr(string(j.data)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) ContentType() string { | ||||||
|  | 	return j.mimetype | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func DownloadData(status int, mimetype string, filename string, data []byte) HTTPResponse { | ||||||
|  | 	return &downloadDataHTTPResponse{statusCode: status, mimetype: mimetype, data: data, filename: &filename} | ||||||
|  | } | ||||||
							
								
								
									
										56
									
								
								ginext/responseEmpty.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										56
									
								
								ginext/responseEmpty.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,56 @@ | |||||||
|  | package ginext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type emptyHTTPResponse struct { | ||||||
|  | 	statusCode int | ||||||
|  | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) Write(g *gin.Context) { | ||||||
|  | 	for _, v := range j.headers { | ||||||
|  | 		g.Header(v.Key, v.Val) | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
|  | 	g.Status(j.statusCode) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||||
|  | 	j.headers = append(j.headers, headerval{k, v}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) IsSuccess() bool { | ||||||
|  | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) Statuscode() int { | ||||||
|  | 	return j.statusCode | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) ContentType() string { | ||||||
|  | 	return "" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func Status(sc int) HTTPResponse { | ||||||
|  | 	return &emptyHTTPResponse{statusCode: sc} | ||||||
|  | } | ||||||
							
								
								
									
										73
									
								
								ginext/responseFile.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										73
									
								
								ginext/responseFile.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,73 @@ | |||||||
|  | package ginext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"fmt" | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"os" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type fileHTTPResponse struct { | ||||||
|  | 	mimetype string | ||||||
|  | 	filepath string | ||||||
|  | 	filename *string | ||||||
|  | 	headers  []headerval | ||||||
|  | 	cookies  []cookieval | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) Write(g *gin.Context) { | ||||||
|  | 	g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later... | ||||||
|  | 	if j.filename != nil { | ||||||
|  | 		g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename)) | ||||||
|  |  | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.headers { | ||||||
|  | 		g.Header(v.Key, v.Val) | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
|  | 	g.File(j.filepath) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||||
|  | 	j.headers = append(j.headers, headerval{k, v}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) IsSuccess() bool { | ||||||
|  | 	return true | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) Statuscode() int { | ||||||
|  | 	return 200 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	data, err := os.ReadFile(j.filepath) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	return langext.Ptr(string(data)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) ContentType() string { | ||||||
|  | 	return j.mimetype | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func File(mimetype string, filepath string) HTTPResponse { | ||||||
|  | 	return &fileHTTPResponse{mimetype: mimetype, filepath: filepath} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func Download(mimetype string, filepath string, filename string) HTTPResponse { | ||||||
|  | 	return &fileHTTPResponse{mimetype: mimetype, filepath: filepath, filename: &filename} | ||||||
|  | } | ||||||
							
								
								
									
										70
									
								
								ginext/responseJson.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										70
									
								
								ginext/responseJson.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,70 @@ | |||||||
|  | package ginext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | 	json "gogs.mikescher.com/BlackForestBytes/goext/gojson" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type jsonHTTPResponse struct { | ||||||
|  | 	statusCode int | ||||||
|  | 	data       any | ||||||
|  | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) jsonRenderer(g *gin.Context) json.GoJsonRender { | ||||||
|  | 	var f *string | ||||||
|  | 	if jsonfilter := g.GetString("goext.jsonfilter"); jsonfilter != "" { | ||||||
|  | 		f = &jsonfilter | ||||||
|  | 	} | ||||||
|  | 	return json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true, Filter: f} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) Write(g *gin.Context) { | ||||||
|  | 	for _, v := range j.headers { | ||||||
|  | 		g.Header(v.Key, v.Val) | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
|  | 	g.Render(j.statusCode, j.jsonRenderer(g)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||||
|  | 	j.headers = append(j.headers, headerval{k, v}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) IsSuccess() bool { | ||||||
|  | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) Statuscode() int { | ||||||
|  | 	return j.statusCode | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) BodyString(g *gin.Context) *string { | ||||||
|  | 	if str, err := j.jsonRenderer(g).RenderString(); err == nil { | ||||||
|  | 		return &str | ||||||
|  | 	} else { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) ContentType() string { | ||||||
|  | 	return "application/json" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func JSON(sc int, data any) HTTPResponse { | ||||||
|  | 	return &jsonHTTPResponse{statusCode: sc, data: data} | ||||||
|  | } | ||||||
							
								
								
									
										77
									
								
								ginext/responseJsonAPI.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										77
									
								
								ginext/responseJsonAPI.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,77 @@ | |||||||
|  | package ginext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type jsonAPIErrResponse struct { | ||||||
|  | 	err     *exerr.ExErr | ||||||
|  | 	headers []headerval | ||||||
|  | 	cookies []cookieval | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) Write(g *gin.Context) { | ||||||
|  | 	for _, v := range j.headers { | ||||||
|  | 		g.Header(v.Key, v.Val) | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	exerr.Get(j.err).Output(context.Background(), g) | ||||||
|  |  | ||||||
|  | 	j.err.CallListener(exerr.MethodOutput) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse { | ||||||
|  | 	j.headers = append(j.headers, headerval{k, v}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) IsSuccess() bool { | ||||||
|  | 	return false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) Statuscode() int { | ||||||
|  | 	return langext.Coalesce(j.err.RecursiveStatuscode(), 0) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	if str, err := j.err.ToDefaultAPIJson(); err == nil { | ||||||
|  | 		return &str | ||||||
|  | 	} else { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) ContentType() string { | ||||||
|  | 	return "application/json" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) Unwrap() error { | ||||||
|  | 	return j.err | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func Error(e error) HTTPResponse { | ||||||
|  | 	return &jsonAPIErrResponse{ | ||||||
|  | 		err: exerr.FromError(e), | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ErrWrap(e error, errorType exerr.ErrorType, msg string) HTTPResponse { | ||||||
|  | 	return &jsonAPIErrResponse{ | ||||||
|  | 		err: exerr.FromError(exerr.Wrap(e, msg).WithType(errorType).Build()), | ||||||
|  | 	} | ||||||
|  | } | ||||||
							
								
								
									
										57
									
								
								ginext/responseRedirect.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										57
									
								
								ginext/responseRedirect.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,57 @@ | |||||||
|  | package ginext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type redirectHTTPResponse struct { | ||||||
|  | 	statusCode int | ||||||
|  | 	url        string | ||||||
|  | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) Write(g *gin.Context) { | ||||||
|  | 	for _, v := range j.headers { | ||||||
|  | 		g.Header(v.Key, v.Val) | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
|  | 	g.Redirect(j.statusCode, j.url) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||||
|  | 	j.headers = append(j.headers, headerval{k, v}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) IsSuccess() bool { | ||||||
|  | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) Statuscode() int { | ||||||
|  | 	return j.statusCode | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) ContentType() string { | ||||||
|  | 	return "" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func Redirect(sc int, newURL string) HTTPResponse { | ||||||
|  | 	return &redirectHTTPResponse{statusCode: sc, url: newURL} | ||||||
|  | } | ||||||
							
								
								
									
										72
									
								
								ginext/responseSeekable.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										72
									
								
								ginext/responseSeekable.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,72 @@ | |||||||
|  | package ginext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"io" | ||||||
|  | 	"net/http" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type seekableResponse struct { | ||||||
|  | 	data        io.ReadSeeker | ||||||
|  | 	contentType string | ||||||
|  | 	filename    string | ||||||
|  | 	headers     []headerval | ||||||
|  | 	cookies     []cookieval | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j seekableResponse) Write(g *gin.Context) { | ||||||
|  | 	g.Header("Content-Type", j.contentType) // if we don't set it here http.ServeContent does weird sniffing later... | ||||||
|  |  | ||||||
|  | 	for _, v := range j.headers { | ||||||
|  | 		g.Header(v.Key, v.Val) | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	http.ServeContent(g.Writer, g.Request, j.filename, time.Unix(0, 0), j.data) | ||||||
|  |  | ||||||
|  | 	if clsr, ok := j.data.(io.ReadSeekCloser); ok { | ||||||
|  | 		err := clsr.Close() | ||||||
|  | 		if err != nil { | ||||||
|  | 			exerr.Wrap(err, "failed to close io.ReadSeerkClose in ginext.Seekable").Str("filename", j.filename).Print() | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j seekableResponse) WithHeader(k string, v string) HTTPResponse { | ||||||
|  | 	j.headers = append(j.headers, headerval{k, v}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j seekableResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j seekableResponse) IsSuccess() bool { | ||||||
|  | 	return true | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j seekableResponse) Statuscode() int { | ||||||
|  | 	return 200 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j seekableResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	return langext.Ptr("(seekable)") | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j seekableResponse) ContentType() string { | ||||||
|  | 	return j.contentType | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j seekableResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func Seekable(filename string, contentType string, data io.ReadSeeker) HTTPResponse { | ||||||
|  | 	return &seekableResponse{filename: filename, contentType: contentType, data: data} | ||||||
|  | } | ||||||
							
								
								
									
										57
									
								
								ginext/responseText.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										57
									
								
								ginext/responseText.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,57 @@ | |||||||
|  | package ginext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type textHTTPResponse struct { | ||||||
|  | 	statusCode int | ||||||
|  | 	data       string | ||||||
|  | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) Write(g *gin.Context) { | ||||||
|  | 	for _, v := range j.headers { | ||||||
|  | 		g.Header(v.Key, v.Val) | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
|  | 	g.String(j.statusCode, "%s", j.data) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||||
|  | 	j.headers = append(j.headers, headerval{k, v}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) IsSuccess() bool { | ||||||
|  | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) Statuscode() int { | ||||||
|  | 	return j.statusCode | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) BodyString(*gin.Context) *string { | ||||||
|  | 	return langext.Ptr(j.data) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) ContentType() string { | ||||||
|  | 	return "text/plain" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) Headers() []string { | ||||||
|  | 	return langext.ArrMap(j.headers, func(v headerval) string { return v.Key + "=" + v.Val }) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func Text(sc int, data string) HTTPResponse { | ||||||
|  | 	return &textHTTPResponse{statusCode: sc, data: data} | ||||||
|  | } | ||||||
| @@ -3,11 +3,9 @@ package ginext | |||||||
| import ( | import ( | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rext" |  | ||||||
| 	"net/http" | 	"net/http" | ||||||
| 	"path" | 	"path" | ||||||
| 	"reflect" | 	"reflect" | ||||||
| 	"regexp" |  | ||||||
| 	"runtime" | 	"runtime" | ||||||
| 	"strings" | 	"strings" | ||||||
| ) | ) | ||||||
| @@ -55,7 +53,11 @@ func (w *GinRoutesWrapper) Group(relativePath string) *GinRoutesWrapper { | |||||||
| func (w *GinRoutesWrapper) Use(middleware ...gin.HandlerFunc) *GinRoutesWrapper { | func (w *GinRoutesWrapper) Use(middleware ...gin.HandlerFunc) *GinRoutesWrapper { | ||||||
| 	defHandler := langext.ArrCopy(w.defaultHandler) | 	defHandler := langext.ArrCopy(w.defaultHandler) | ||||||
| 	defHandler = append(defHandler, middleware...) | 	defHandler = append(defHandler, middleware...) | ||||||
| 	return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler} | 	return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler, absPath: w.absPath} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (w *GinRoutesWrapper) WithJSONFilter(filter string) *GinRoutesWrapper { | ||||||
|  | 	return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) }) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (w *GinRoutesWrapper) GET(relativePath string) *GinRouteBuilder { | func (w *GinRoutesWrapper) GET(relativePath string) *GinRouteBuilder { | ||||||
| @@ -109,6 +111,10 @@ func (w *GinRouteBuilder) Use(middleware ...gin.HandlerFunc) *GinRouteBuilder { | |||||||
| 	return w | 	return w | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (w *GinRouteBuilder) WithJSONFilter(filter string) *GinRouteBuilder { | ||||||
|  | 	return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) }) | ||||||
|  | } | ||||||
|  |  | ||||||
| func (w *GinRouteBuilder) Handle(handler WHandlerFunc) { | func (w *GinRouteBuilder) Handle(handler WHandlerFunc) { | ||||||
|  |  | ||||||
| 	if w.routes.wrapper.bufferBody { | 	if w.routes.wrapper.bufferBody { | ||||||
| @@ -181,12 +187,6 @@ func nameOfFunction(f any) string { | |||||||
| 		fname = fname[:len(fname)-len("-fm")] | 		fname = fname[:len(fname)-len("-fm")] | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	suffix := rext.W(regexp.MustCompile(`\.func[0-9]+(?:\.[0-9]+)*$`)) |  | ||||||
|  |  | ||||||
| 	if match, ok := suffix.MatchFirst(fname); ok { |  | ||||||
| 		fname = fname[:len(fname)-match.FullMatch().Length()] |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return fname | 	return fname | ||||||
| } | } | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										11
									
								
								ginext/session.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										11
									
								
								ginext/session.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,11 @@ | |||||||
|  | package ginext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type SessionObject interface { | ||||||
|  | 	Init(g *gin.Context, ctx *AppContext) error | ||||||
|  | 	Finish(ctx context.Context, resp HTTPResponse) error | ||||||
|  | } | ||||||
							
								
								
									
										68
									
								
								go.mod
									
									
									
									
									
								
							
							
						
						
									
										68
									
								
								go.mod
									
									
									
									
									
								
							| @@ -1,50 +1,66 @@ | |||||||
| module gogs.mikescher.com/BlackForestBytes/goext | module gogs.mikescher.com/BlackForestBytes/goext | ||||||
|  |  | ||||||
| go 1.19 | go 1.22 | ||||||
|  |  | ||||||
| require ( | require ( | ||||||
| 	github.com/gin-gonic/gin v1.9.1 | 	github.com/gin-gonic/gin v1.10.0 | ||||||
| 	github.com/jmoiron/sqlx v1.3.5 | 	github.com/glebarez/go-sqlite v1.22.0 // only needed for tests -.- | ||||||
|  | 	github.com/jmoiron/sqlx v1.4.0 | ||||||
| 	github.com/rs/xid v1.5.0 | 	github.com/rs/xid v1.5.0 | ||||||
| 	github.com/rs/zerolog v1.30.0 | 	github.com/rs/zerolog v1.33.0 | ||||||
| 	go.mongodb.org/mongo-driver v1.12.1 | 	go.mongodb.org/mongo-driver v1.15.0 | ||||||
| 	golang.org/x/crypto v0.12.0 | 	golang.org/x/crypto v0.24.0 | ||||||
| 	golang.org/x/sys v0.11.0 | 	golang.org/x/sys v0.21.0 | ||||||
| 	golang.org/x/term v0.11.0 | 	golang.org/x/term v0.21.0 | ||||||
| ) | ) | ||||||
|  |  | ||||||
| require ( | require ( | ||||||
| 	github.com/bytedance/sonic v1.10.0 // indirect | 	github.com/disintegration/imaging v1.6.2 | ||||||
|  | 	github.com/jung-kurt/gofpdf v1.16.2 | ||||||
|  | 	golang.org/x/sync v0.7.0 | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | require ( | ||||||
|  | 	github.com/bytedance/sonic v1.11.8 // indirect | ||||||
|  | 	github.com/bytedance/sonic/loader v0.1.1 // indirect | ||||||
| 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect | 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect | ||||||
| 	github.com/chenzhuoyu/iasm v0.9.0 // indirect | 	github.com/chenzhuoyu/iasm v0.9.1 // indirect | ||||||
| 	github.com/gabriel-vasile/mimetype v1.4.2 // indirect | 	github.com/cloudwego/base64x v0.1.4 // indirect | ||||||
|  | 	github.com/cloudwego/iasm v0.2.0 // indirect | ||||||
|  | 	github.com/dustin/go-humanize v1.0.1 // indirect | ||||||
|  | 	github.com/gabriel-vasile/mimetype v1.4.4 // indirect | ||||||
| 	github.com/gin-contrib/sse v0.1.0 // indirect | 	github.com/gin-contrib/sse v0.1.0 // indirect | ||||||
| 	github.com/go-playground/locales v0.14.1 // indirect | 	github.com/go-playground/locales v0.14.1 // indirect | ||||||
| 	github.com/go-playground/universal-translator v0.18.1 // indirect | 	github.com/go-playground/universal-translator v0.18.1 // indirect | ||||||
| 	github.com/go-playground/validator/v10 v10.15.0 // indirect | 	github.com/go-playground/validator/v10 v10.21.0 // indirect | ||||||
| 	github.com/goccy/go-json v0.10.2 // indirect | 	github.com/goccy/go-json v0.10.3 // indirect | ||||||
| 	github.com/golang/snappy v0.0.4 // indirect | 	github.com/golang/snappy v0.0.4 // indirect | ||||||
|  | 	github.com/google/uuid v1.5.0 // indirect | ||||||
| 	github.com/json-iterator/go v1.1.12 // indirect | 	github.com/json-iterator/go v1.1.12 // indirect | ||||||
| 	github.com/klauspost/compress v1.16.7 // indirect | 	github.com/klauspost/compress v1.17.8 // indirect | ||||||
| 	github.com/klauspost/cpuid/v2 v2.2.5 // indirect | 	github.com/klauspost/cpuid/v2 v2.2.8 // indirect | ||||||
| 	github.com/leodido/go-urn v1.2.4 // indirect | 	github.com/leodido/go-urn v1.4.0 // indirect | ||||||
| 	github.com/mattn/go-colorable v0.1.13 // indirect | 	github.com/mattn/go-colorable v0.1.13 // indirect | ||||||
| 	github.com/mattn/go-isatty v0.0.19 // indirect | 	github.com/mattn/go-isatty v0.0.20 // indirect | ||||||
| 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect | 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect | ||||||
| 	github.com/modern-go/reflect2 v1.0.2 // indirect | 	github.com/modern-go/reflect2 v1.0.2 // indirect | ||||||
| 	github.com/montanaflynn/stats v0.7.1 // indirect | 	github.com/montanaflynn/stats v0.7.1 // indirect | ||||||
| 	github.com/pelletier/go-toml/v2 v2.0.9 // indirect | 	github.com/pelletier/go-toml/v2 v2.2.2 // indirect | ||||||
| 	github.com/pkg/errors v0.9.1 // indirect | 	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect | ||||||
| 	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect | 	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect | ||||||
| 	github.com/ugorji/go/codec v1.2.11 // indirect | 	github.com/ugorji/go/codec v1.2.12 // indirect | ||||||
| 	github.com/xdg-go/pbkdf2 v1.0.0 // indirect | 	github.com/xdg-go/pbkdf2 v1.0.0 // indirect | ||||||
| 	github.com/xdg-go/scram v1.1.2 // indirect | 	github.com/xdg-go/scram v1.1.2 // indirect | ||||||
| 	github.com/xdg-go/stringprep v1.0.4 // indirect | 	github.com/xdg-go/stringprep v1.0.4 // indirect | ||||||
| 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect | 	github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76 // indirect | ||||||
| 	golang.org/x/arch v0.4.0 // indirect | 	golang.org/x/arch v0.8.0 // indirect | ||||||
| 	golang.org/x/net v0.14.0 // indirect | 	golang.org/x/image v0.17.0 // indirect | ||||||
| 	golang.org/x/sync v0.3.0 // indirect | 	golang.org/x/net v0.26.0 // indirect | ||||||
| 	golang.org/x/text v0.12.0 // indirect | 	golang.org/x/text v0.16.0 // indirect | ||||||
| 	google.golang.org/protobuf v1.31.0 // indirect | 	google.golang.org/protobuf v1.34.2 // indirect | ||||||
| 	gopkg.in/yaml.v3 v3.0.1 // indirect | 	gopkg.in/yaml.v3 v3.0.1 // indirect | ||||||
|  | 	modernc.org/libc v1.37.6 // indirect | ||||||
|  | 	modernc.org/mathutil v1.6.0 // indirect | ||||||
|  | 	modernc.org/memory v1.7.2 // indirect | ||||||
|  | 	modernc.org/sqlite v1.28.0 // indirect | ||||||
| ) | ) | ||||||
|   | |||||||
							
								
								
									
										336
									
								
								go.sum
									
									
									
									
									
								
							
							
						
						
									
										336
									
								
								go.sum
									
									
									
									
									
								
							| @@ -1,221 +1,355 @@ | |||||||
|  | filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= | ||||||
|  | github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= | ||||||
| github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= | github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= | ||||||
| github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= | github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= | ||||||
| github.com/bytedance/sonic v1.10.0-rc2 h1:oDfRZ+4m6AYCOC0GFeOCeYqvBmucy1isvouS2K0cPzo= | github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE= | ||||||
| github.com/bytedance/sonic v1.10.0-rc2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||||
| github.com/bytedance/sonic v1.10.0-rc3 h1:uNSnscRapXTwUgTyOF0GVljYD08p9X/Lbr9MweSV3V0= | github.com/bytedance/sonic v1.11.0 h1:FwNNv6Vu4z2Onf1++LNzxB/QhitD8wuTdpZzMTGITWo= | ||||||
| github.com/bytedance/sonic v1.10.0-rc3/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | github.com/bytedance/sonic v1.11.0/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||||
| github.com/bytedance/sonic v1.10.0 h1:qtNZduETEIWJVIyDl01BeNxur2rW9OwTQ/yBqFRkKEk= | github.com/bytedance/sonic v1.11.1 h1:JC0+6c9FoWYYxakaoa+c5QTtJeiSZNeByOBhXtAFSn4= | ||||||
| github.com/bytedance/sonic v1.10.0/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | github.com/bytedance/sonic v1.11.1/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||||
|  | github.com/bytedance/sonic v1.11.2 h1:ywfwo0a/3j9HR8wsYGWsIWl2mvRsI950HyoxiBERw5A= | ||||||
|  | github.com/bytedance/sonic v1.11.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||||
|  | github.com/bytedance/sonic v1.11.3 h1:jRN+yEjakWh8aK5FzrciUHG8OFXK+4/KrAX/ysEtHAA= | ||||||
|  | github.com/bytedance/sonic v1.11.3/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||||
|  | github.com/bytedance/sonic v1.11.4 h1:8+OMLSSDDm2/qJc6ld5K5Sm62NK9VHcUKk0NzBoMAM4= | ||||||
|  | github.com/bytedance/sonic v1.11.4/go.mod h1:YrWEqYtlBPS6LUA0vpuG79a1trsh4Ae41uWUWUreHhE= | ||||||
|  | github.com/bytedance/sonic v1.11.5 h1:G00FYjjqll5iQ1PYXynbg/hyzqBqavH8Mo9/oTopd9k= | ||||||
|  | github.com/bytedance/sonic v1.11.5/go.mod h1:X2PC2giUdj/Cv2lliWFLk6c/DUQok5rViJSemeB0wDw= | ||||||
|  | github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0= | ||||||
|  | github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= | ||||||
|  | github.com/bytedance/sonic v1.11.7 h1:k/l9p1hZpNIMJSk37wL9ltkcpqLfIho1vYthi4xT2t4= | ||||||
|  | github.com/bytedance/sonic v1.11.7/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= | ||||||
|  | github.com/bytedance/sonic v1.11.8 h1:Zw/j1KfiS+OYTi9lyB3bb0CFxPJVkM17k1wyDG32LRA= | ||||||
|  | github.com/bytedance/sonic v1.11.8/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= | ||||||
|  | github.com/bytedance/sonic/loader v0.1.0/go.mod h1:UmRT+IRTGKz/DAkzcEGzyVqQFJ7H9BqwBO3pm9H/+HY= | ||||||
|  | github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM= | ||||||
|  | github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= | ||||||
| github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= | github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= | ||||||
| github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= | github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= | ||||||
| github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= | github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= | ||||||
| github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA= | github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA= | ||||||
| github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo= |  | ||||||
| github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | ||||||
| github.com/coreos/go-systemd/v22 v22.3.3-0.20220203105225-a9a7ef127534/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= | github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0= | ||||||
|  | github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | ||||||
|  | github.com/cloudwego/base64x v0.1.0 h1:Tg5q9tq1khq9Y9UwfoC6zkHK0FypN2GLDvhqFceOL8U= | ||||||
|  | github.com/cloudwego/base64x v0.1.0/go.mod h1:lM8nFiNbg74QgesNo6EAtv8N9tlRjBWExmHoNDa3PkU= | ||||||
|  | github.com/cloudwego/base64x v0.1.3 h1:b5J/l8xolB7dyDTTmhJP2oTs5LdrjyrUFuNxdfq5hAg= | ||||||
|  | github.com/cloudwego/base64x v0.1.3/go.mod h1:1+1K5BUHIQzyapgpF7LwvOGAEDicKtt1umPV+aN8pi8= | ||||||
|  | github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y= | ||||||
|  | github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= | ||||||
|  | github.com/cloudwego/iasm v0.0.9/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= | ||||||
|  | github.com/cloudwego/iasm v0.1.0 h1:q0OuhwWDMyi3nlrQ6kIr0Yx0c3FI6cq/OZWKodIDdz8= | ||||||
|  | github.com/cloudwego/iasm v0.1.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= | ||||||
|  | github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg= | ||||||
|  | github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= | ||||||
| github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= | github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= | ||||||
| github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | ||||||
|  | github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= | ||||||
| github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | ||||||
| github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= | github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c= | ||||||
| github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= | github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4= | ||||||
|  | github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= | ||||||
|  | github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= | ||||||
|  | github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= | ||||||
|  | github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= | ||||||
|  | github.com/gabriel-vasile/mimetype v1.4.4 h1:QjV6pZ7/XZ7ryI2KuyeEDE8wnh7fHP9YnQy+R0LnH8I= | ||||||
|  | github.com/gabriel-vasile/mimetype v1.4.4/go.mod h1:JwLei5XPtWdGiMFB5Pjle1oEeoSeEuJfJE+TtfvdB/s= | ||||||
| github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= | github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= | ||||||
| github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= | github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= | ||||||
| github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= | github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= | ||||||
| github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= | github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= | ||||||
|  | github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU= | ||||||
|  | github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= | ||||||
|  | github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ= | ||||||
|  | github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc= | ||||||
|  | github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= | ||||||
|  | github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= | ||||||
| github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= | github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= | ||||||
| github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= | github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= | ||||||
| github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= | github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= | ||||||
| github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= | github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= | ||||||
| github.com/go-playground/validator/v10 v10.14.1 h1:9c50NUPC30zyuKprjL3vNZ0m5oG+jU0zvx4AqHGnv4k= | github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE= | ||||||
| github.com/go-playground/validator/v10 v10.14.1/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||||
| github.com/go-playground/validator/v10 v10.15.0 h1:nDU5XeOKtB3GEa+uB7GNYwhVKsgjAR7VgKoNB6ryXfw= | github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74= | ||||||
| github.com/go-playground/validator/v10 v10.15.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | github.com/go-playground/validator/v10 v10.17.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||||
|  | github.com/go-playground/validator/v10 v10.18.0 h1:BvolUXjp4zuvkZ5YN5t7ebzbhlUtPsPm2S9NAZ5nl9U= | ||||||
|  | github.com/go-playground/validator/v10 v10.18.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||||
|  | github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4= | ||||||
|  | github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||||
|  | github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8= | ||||||
|  | github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||||
|  | github.com/go-playground/validator/v10 v10.21.0 h1:4fZA11ovvtkdgaeev9RGWPgc1uj3H8W+rNYyH/ySBb0= | ||||||
|  | github.com/go-playground/validator/v10 v10.21.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||||
|  | github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | ||||||
| github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | ||||||
|  | github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= | ||||||
|  | github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= | ||||||
| github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | ||||||
| github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= | github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= | ||||||
|  | github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= | ||||||
|  | github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= | ||||||
| github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= | github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= | ||||||
| github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= | github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= | ||||||
| github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4= |  | ||||||
| github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | ||||||
| github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= | github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= | ||||||
| github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | ||||||
| github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= | github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= | ||||||
|  | github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= | ||||||
| github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= | github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= | ||||||
| github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= | github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= | ||||||
|  | github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ= | ||||||
|  | github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo= | ||||||
|  | github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= | ||||||
|  | github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= | ||||||
| github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= | github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= | ||||||
| github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= | github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= | ||||||
|  | github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= | ||||||
|  | github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= | ||||||
| github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= | github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= | ||||||
| github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= | github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= | ||||||
| github.com/klauspost/compress v1.13.6 h1:P76CopJELS0TiO2mebmnzgWaajssP/EszplttgQxcgc= | github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= | ||||||
|  | github.com/jung-kurt/gofpdf v1.16.2 h1:jgbatWHfRlPYiK85qgevsZTHviWXKwB1TTiKdz5PtRc= | ||||||
|  | github.com/jung-kurt/gofpdf v1.16.2/go.mod h1:1hl7y57EsiPAkLbOwzpzqgx1A30nQCk/YmFV8S2vmK0= | ||||||
| github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= | github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= | ||||||
| github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I= | github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= | ||||||
| github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= | github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | ||||||
|  | github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI= | ||||||
|  | github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | ||||||
|  | github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= | ||||||
|  | github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= | ||||||
|  | github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU= | ||||||
|  | github.com/klauspost/compress v1.17.8/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= | ||||||
| github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= | github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||||
|  | github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM= | ||||||
|  | github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||||
|  | github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM= | ||||||
|  | github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||||
| github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= | github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= | ||||||
| github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= |  | ||||||
| github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= |  | ||||||
| github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= |  | ||||||
| github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= | github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= | ||||||
| github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= | github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= | ||||||
|  | github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= | ||||||
|  | github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= | ||||||
|  | github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0= | ||||||
| github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= | github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= | ||||||
| github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= | github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= | ||||||
| github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= | github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= | ||||||
| github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= | github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= | ||||||
| github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= |  | ||||||
| github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= | github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= | ||||||
| github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng= |  | ||||||
| github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= |  | ||||||
| github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= |  | ||||||
| github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= | github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= | ||||||
|  | github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= | ||||||
|  | github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= | ||||||
| github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= | github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= | ||||||
|  | github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI= | ||||||
|  | github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= | ||||||
|  | github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= | ||||||
| github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | ||||||
| github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= | github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= | ||||||
| github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | ||||||
| github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= | github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= | ||||||
| github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= | github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= | ||||||
| github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe h1:iruDEfMl2E6fbMZ9s0scYfZQ84/6SPL6zC8ACM2oIL0= |  | ||||||
| github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= | github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= | ||||||
| github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= | github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= | ||||||
| github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= | github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= | ||||||
| github.com/pelletier/go-toml/v2 v2.0.9 h1:uH2qQXheeefCCkuBBSLi7jCiSmj3VRh2+Goq2N7Xxu0= | github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI= | ||||||
| github.com/pelletier/go-toml/v2 v2.0.9/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= | github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= | ||||||
| github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= | github.com/pelletier/go-toml/v2 v2.2.0 h1:QLgLl2yMN7N+ruc31VynXs1vhMZa7CeHHejIeBAsoHo= | ||||||
|  | github.com/pelletier/go-toml/v2 v2.2.0/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= | ||||||
|  | github.com/pelletier/go-toml/v2 v2.2.1 h1:9TA9+T8+8CUCO2+WYnDLCgrYi9+omqKXyjDtosvtEhg= | ||||||
|  | github.com/pelletier/go-toml/v2 v2.2.1/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= | ||||||
|  | github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM= | ||||||
|  | github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= | ||||||
|  | github.com/phpdave11/gofpdi v1.0.7/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= | ||||||
|  | github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= | ||||||
| github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= | github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= | ||||||
|  | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= | ||||||
| github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= | ||||||
| github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= | github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= | ||||||
|  | github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= | ||||||
| github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= | github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= | ||||||
| github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= | github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= | ||||||
| github.com/rs/zerolog v1.29.0 h1:Zes4hju04hjbvkVkOhdl2HpZa+0PmVwigmo8XoORE5w= | github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A= | ||||||
| github.com/rs/zerolog v1.29.0/go.mod h1:NILgTygv/Uej1ra5XxGf82ZFSLk58MFGAUS2o6usyD0= | github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= | ||||||
| github.com/rs/zerolog v1.29.1 h1:cO+d60CHkknCbvzEWxP0S9K6KqyTjrCNUy1LdQLCGPc= | github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0= | ||||||
| github.com/rs/zerolog v1.29.1/go.mod h1:Le6ESbR7hc+DP6Lt1THiV8CQSdkkNrd3R0XbEgp3ZBU= | github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= | ||||||
| github.com/rs/zerolog v1.30.0 h1:SymVODrcRsaRaSInD9yQtKbtWqwsfoPcRff/oRXLj4c= | github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8= | ||||||
| github.com/rs/zerolog v1.30.0/go.mod h1:/tk+P47gFdPXq4QYjvCmT5/Gsug2nagsFWBWhAiSi1w= | github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= | ||||||
|  | github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= | ||||||
| github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= | github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= | ||||||
| github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= | github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= | ||||||
| github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= | github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= | ||||||
|  | github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= | ||||||
|  | github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= | ||||||
| github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= | github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= | ||||||
| github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= |  | ||||||
| github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= | github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= | ||||||
| github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= | github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= | ||||||
| github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= | github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= | ||||||
| github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= | github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= | ||||||
| github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= | github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= | ||||||
|  | github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= | ||||||
| github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= | github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= | ||||||
| github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= | github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= | ||||||
|  | github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= | ||||||
| github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= | github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= | ||||||
| github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= | github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= | ||||||
| github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= | github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= | ||||||
| github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= | github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= | ||||||
| github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= | github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= | ||||||
| github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= | github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= | ||||||
| github.com/xdg-go/scram v1.1.1 h1:VOMT+81stJgXW3CpHyqHN3AXDYIMsx56mEFrB37Mb/E= |  | ||||||
| github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= |  | ||||||
| github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= | github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= | ||||||
| github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= | github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= | ||||||
| github.com/xdg-go/stringprep v1.0.3 h1:kdwGpVNwPFtjs98xCGkHjQtGKh86rDcRZN17QEMCOIs= |  | ||||||
| github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= |  | ||||||
| github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= | github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= | ||||||
| github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= | github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= | ||||||
| github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= |  | ||||||
| github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= | github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= | ||||||
| github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk= | github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk= | ||||||
| github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4= | github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4= | ||||||
|  | github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76 h1:tBiBTKHnIjovYoLX/TPkcf+OjqqKGQrPtGT3Foz+Pgo= | ||||||
|  | github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76/go.mod h1:SQliXeA7Dhkt//vS29v3zpbEwoa+zb2Cn5xj5uO4K5U= | ||||||
| github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | ||||||
| go.mongodb.org/mongo-driver v1.11.2 h1:+1v2rDQUWNcGW7/7E0Jvdz51V38XXxJfhzbV17aNHCw= | go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk= | ||||||
| go.mongodb.org/mongo-driver v1.11.2/go.mod h1:s7p5vEtfbeR1gYi6pnj3c3/urpbLv2T5Sfd6Rp2HBB8= | go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo= | ||||||
| go.mongodb.org/mongo-driver v1.12.0 h1:aPx33jmn/rQuJXPQLZQ8NtfPQG8CaqgLThFtqRb0PiE= | go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= | ||||||
| go.mongodb.org/mongo-driver v1.12.0/go.mod h1:AZkxhPnFJUoH7kZlFkVKucV20K387miPfm7oimrSmK0= | go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= | ||||||
| go.mongodb.org/mongo-driver v1.12.1 h1:nLkghSU8fQNaK7oUmDhQFsnrtcoNy7Z6LVFKsEecqgE= | go.mongodb.org/mongo-driver v1.15.0 h1:rJCKC8eEliewXjZGf0ddURtl7tTVy1TK3bfl0gkUSLc= | ||||||
| go.mongodb.org/mongo-driver v1.12.1/go.mod h1:/rGBTebI3XYboVmgz+Wv3Bcbl3aD0QF9zl6kDDw18rQ= | go.mongodb.org/mongo-driver v1.15.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= | ||||||
| golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | ||||||
| golang.org/x/arch v0.4.0 h1:A8WCeEWhLwPBKNbFi5Wv5UTCBx5zzubnXDlMOFAzFMc= | golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc= | ||||||
| golang.org/x/arch v0.4.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||||
|  | golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc= | ||||||
|  | golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||||
| golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | ||||||
| golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | ||||||
| golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= | golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= | ||||||
| golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= | golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= | ||||||
| golang.org/x/crypto v0.4.0 h1:UVQgzMY87xqpKNgb+kDsll2Igd33HszWHFLmpaRMq/8= | golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= | ||||||
| golang.org/x/crypto v0.4.0/go.mod h1:3quD/ATkf6oY+rnes5c3ExXTbLc8mueNue5/DoinL80= | golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= | ||||||
| golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA= | golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= | ||||||
| golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= | golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= | ||||||
| golang.org/x/crypto v0.12.0 h1:tFM/ta59kqch6LlvYnPa0yx5a83cL2nHflFhYKvv9Yk= | golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= | ||||||
| golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= | golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= | ||||||
|  | golang.org/x/crypto v0.20.0 h1:jmAMJJZXr5KiCw05dfYK9QnqaqKLYXijU23lsEdcQqg= | ||||||
|  | golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ= | ||||||
|  | golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= | ||||||
|  | golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= | ||||||
|  | golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= | ||||||
|  | golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= | ||||||
|  | golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI= | ||||||
|  | golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= | ||||||
|  | golang.org/x/crypto v0.24.0 h1:mnl8DM0o513X8fdIkmyFE/5hTYxbwYOjDS/+rK6qpRI= | ||||||
|  | golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= | ||||||
|  | golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= | ||||||
|  | golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3NCfkPxbDKRdnNE1Rpg0U= | ||||||
|  | golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= | ||||||
|  | golang.org/x/image v0.16.0 h1:9kloLAKhUufZhA12l5fwnx2NZW39/we1UhBesW433jw= | ||||||
|  | golang.org/x/image v0.16.0/go.mod h1:ugSZItdV4nOxyqp56HmXwH0Ry0nBCpjnZdpDaIHdoPs= | ||||||
|  | golang.org/x/image v0.17.0 h1:nTRVVdajgB8zCMZVsViyzhnMKPwYeroEERRC64JuLco= | ||||||
|  | golang.org/x/image v0.17.0/go.mod h1:4yyo5vMFQjVjUcVk4jEQcU9MGy/rulF5WvUILseCM2E= | ||||||
| golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | ||||||
| golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | ||||||
| golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | ||||||
| golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= | golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= | ||||||
| golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= | golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= | ||||||
| golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= | golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= | ||||||
| golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50= | golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= | ||||||
| golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= | golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= | ||||||
| golang.org/x/net v0.13.0 h1:Nvo8UFsZ8X3BhAC9699Z1j7XQ3rsZnUUm7jfBEk1ueY= | golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= | ||||||
| golang.org/x/net v0.13.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= | golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= | ||||||
| golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14= | golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= | ||||||
| golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= | golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= | ||||||
|  | golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc= | ||||||
|  | golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= | ||||||
|  | golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w= | ||||||
|  | golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8= | ||||||
|  | golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= | ||||||
|  | golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= | ||||||
|  | golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ= | ||||||
|  | golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= | ||||||
| golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||||
| golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= |  | ||||||
| golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= |  | ||||||
| golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||||
| golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= | golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= | ||||||
| golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= | golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | ||||||
|  | golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= | ||||||
|  | golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | ||||||
| golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | ||||||
| golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||||
| golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||||
| golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||||
| golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||||
| golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= |  | ||||||
| golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= |  | ||||||
| golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||||
| golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||||
| golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||||
| golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U= |  | ||||||
| golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= |  | ||||||
| golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ= |  | ||||||
| golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= |  | ||||||
| golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= |  | ||||||
| golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||||
| golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||||
| golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= | golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||||
| golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= | ||||||
| golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM= | golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
| golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= | ||||||
|  | golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
|  | golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= | ||||||
|  | golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
|  | golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= | ||||||
|  | golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
|  | golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y= | ||||||
|  | golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
|  | golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws= | ||||||
|  | golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||||
| golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | ||||||
| golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | ||||||
| golang.org/x/term v0.1.0 h1:g6Z6vPFA9dYBAF7DWcH6sCcOntplXsDKcliusYijMlw= | golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE= | ||||||
| golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= | ||||||
| golang.org/x/term v0.3.0 h1:qoo4akIqOcDME5bhc/NgxUdovd6BSS2uMsVjB56q1xI= | golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= | ||||||
| golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= | golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= | ||||||
| golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c= | golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8= | ||||||
| golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= | golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= | ||||||
| golang.org/x/term v0.11.0 h1:F9tnn/DA/Im8nCwm+fX+1/eBwi4qFjRT++MhtVC4ZX0= | golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q= | ||||||
| golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= | golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= | ||||||
|  | golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw= | ||||||
|  | golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= | ||||||
|  | golang.org/x/term v0.21.0 h1:WVXCp+/EBEHOj53Rvu+7KiT/iElMrO8ACK16SMZ3jaA= | ||||||
|  | golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0= | ||||||
| golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | ||||||
| golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||||
| golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||||
| golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= | golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= | ||||||
| golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= | golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= | ||||||
| golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM= |  | ||||||
| golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= |  | ||||||
| golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= | golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= | ||||||
| golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= | golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= | ||||||
| golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= | golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= | ||||||
| golang.org/x/text v0.12.0 h1:k+n5B8goJNdU7hSvEtMUz3d1Q6D/XW4COJSJR6fN0mc= | golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk= | ||||||
| golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= | golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= | ||||||
|  | golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= | ||||||
|  | golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= | ||||||
| golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= | golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= | ||||||
| golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | ||||||
| golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= | golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= | ||||||
| golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | ||||||
| golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | ||||||
| google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= | golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU= | ||||||
| google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= | golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= | ||||||
| google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= | google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= | ||||||
|  | google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||||
|  | google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= | ||||||
|  | google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||||
|  | google.golang.org/protobuf v1.34.0 h1:Qo/qEd2RZPCf2nKuorzksSknv0d3ERwp1vFG38gSmH4= | ||||||
|  | google.golang.org/protobuf v1.34.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||||
|  | google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg= | ||||||
|  | google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||||
|  | google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= | ||||||
|  | google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= | ||||||
|  | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= | ||||||
| gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | ||||||
| gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= |  | ||||||
| gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | ||||||
| gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= | gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= | ||||||
| gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | ||||||
|  | modernc.org/libc v1.37.6 h1:orZH3c5wmhIQFTXF+Nt+eeauyd+ZIt2BX6ARe+kD+aw= | ||||||
|  | modernc.org/libc v1.37.6/go.mod h1:YAXkAZ8ktnkCKaN9sw/UDeUVkGYJ/YquGO4FTi5nmHE= | ||||||
|  | modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4= | ||||||
|  | modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo= | ||||||
|  | modernc.org/memory v1.7.2 h1:Klh90S215mmH8c9gO98QxQFsY+W451E8AnzjoE2ee1E= | ||||||
|  | modernc.org/memory v1.7.2/go.mod h1:NO4NVCQy0N7ln+T9ngWqOQfi7ley4vpwvARR+Hjw95E= | ||||||
|  | modernc.org/sqlite v1.28.0 h1:Zx+LyDDmXczNnEQdvPuEfcFVA2ZPyaD7UCZDjef3BHQ= | ||||||
|  | modernc.org/sqlite v1.28.0/go.mod h1:Qxpazz0zH8Z1xCFyi5GSL3FzbtZ3fvbjmywNogldEW0= | ||||||
| nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= | nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= | ||||||
| rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= | rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| package goext | package goext | ||||||
|  |  | ||||||
| const GoextVersion = "0.0.242" | const GoextVersion = "0.0.469" | ||||||
|  |  | ||||||
| const GoextVersionTimestamp = "2023-08-14T16:05:12+0200" | const GoextVersionTimestamp = "2024-06-11T12:10:49+0200" | ||||||
|   | |||||||
| @@ -217,6 +217,7 @@ type decodeState struct { | |||||||
| 	savedError            error | 	savedError            error | ||||||
| 	useNumber             bool | 	useNumber             bool | ||||||
| 	disallowUnknownFields bool | 	disallowUnknownFields bool | ||||||
|  | 	tagkey                *string | ||||||
| } | } | ||||||
|  |  | ||||||
| // readIndex returns the position of the last byte read. | // readIndex returns the position of the last byte read. | ||||||
| @@ -652,7 +653,11 @@ func (d *decodeState) object(v reflect.Value) error { | |||||||
| 			v.Set(reflect.MakeMap(t)) | 			v.Set(reflect.MakeMap(t)) | ||||||
| 		} | 		} | ||||||
| 	case reflect.Struct: | 	case reflect.Struct: | ||||||
| 		fields = cachedTypeFields(t) | 		tagkey := "json" | ||||||
|  | 		if d.tagkey != nil { | ||||||
|  | 			tagkey = *d.tagkey | ||||||
|  | 		} | ||||||
|  | 		fields = cachedTypeFields(t, tagkey) | ||||||
| 		// ok | 		// ok | ||||||
| 	default: | 	default: | ||||||
| 		d.saveError(&UnmarshalTypeError{Value: "object", Type: t, Offset: int64(d.off)}) | 		d.saveError(&UnmarshalTypeError{Value: "object", Type: t, Offset: int64(d.off)}) | ||||||
|   | |||||||
							
								
								
									
										113
									
								
								gojson/encode.go
									
									
									
									
									
								
							
							
						
						
									
										113
									
								
								gojson/encode.go
									
									
									
									
									
								
							| @@ -156,7 +156,6 @@ import ( | |||||||
| // an error. | // an error. | ||||||
| func Marshal(v any) ([]byte, error) { | func Marshal(v any) ([]byte, error) { | ||||||
| 	e := newEncodeState() | 	e := newEncodeState() | ||||||
| 	defer encodeStatePool.Put(e) |  | ||||||
|  |  | ||||||
| 	err := e.marshal(v, encOpts{escapeHTML: true}) | 	err := e.marshal(v, encOpts{escapeHTML: true}) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -164,6 +163,8 @@ func Marshal(v any) ([]byte, error) { | |||||||
| 	} | 	} | ||||||
| 	buf := append([]byte(nil), e.Bytes()...) | 	buf := append([]byte(nil), e.Bytes()...) | ||||||
|  |  | ||||||
|  | 	encodeStatePool.Put(e) | ||||||
|  |  | ||||||
| 	return buf, nil | 	return buf, nil | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -174,9 +175,9 @@ type IndentOpt struct { | |||||||
|  |  | ||||||
| // MarshalSafeCollections is like Marshal except it will marshal nil maps and | // MarshalSafeCollections is like Marshal except it will marshal nil maps and | ||||||
| // slices as '{}' and '[]' respectfully instead of 'null' | // slices as '{}' and '[]' respectfully instead of 'null' | ||||||
| func MarshalSafeCollections(v interface{}, nilSafeSlices bool, nilSafeMaps bool, indent *IndentOpt) ([]byte, error) { | func MarshalSafeCollections(v interface{}, nilSafeSlices bool, nilSafeMaps bool, indent *IndentOpt, filter *string) ([]byte, error) { | ||||||
| 	e := &encodeState{} | 	e := &encodeState{} | ||||||
| 	err := e.marshal(v, encOpts{escapeHTML: true, nilSafeSlices: nilSafeSlices, nilSafeMaps: nilSafeMaps}) | 	err := e.marshal(v, encOpts{escapeHTML: true, nilSafeSlices: nilSafeSlices, nilSafeMaps: nilSafeMaps, filter: filter}) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, err | ||||||
| 	} | 	} | ||||||
| @@ -381,7 +382,12 @@ func isEmptyValue(v reflect.Value) bool { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (e *encodeState) reflectValue(v reflect.Value, opts encOpts) { | func (e *encodeState) reflectValue(v reflect.Value, opts encOpts) { | ||||||
| 	valueEncoder(v)(e, v, opts) | 	tagkey := "json" | ||||||
|  | 	if opts.tagkey != nil { | ||||||
|  | 		tagkey = *opts.tagkey | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	valueEncoder(v, tagkey)(e, v, opts) | ||||||
| } | } | ||||||
|  |  | ||||||
| type encOpts struct { | type encOpts struct { | ||||||
| @@ -393,20 +399,25 @@ type encOpts struct { | |||||||
| 	nilSafeSlices bool | 	nilSafeSlices bool | ||||||
| 	// nilSafeMaps marshals a nil maps '{}' instead of 'null' | 	// nilSafeMaps marshals a nil maps '{}' instead of 'null' | ||||||
| 	nilSafeMaps bool | 	nilSafeMaps bool | ||||||
|  | 	// filter matches jsonfilter tag of struct | ||||||
|  | 	// marshals if no jsonfilter is set or otherwise if jsonfilter has the filter value | ||||||
|  | 	filter *string | ||||||
|  | 	// use different tag instead of "json" | ||||||
|  | 	tagkey *string | ||||||
| } | } | ||||||
|  |  | ||||||
| type encoderFunc func(e *encodeState, v reflect.Value, opts encOpts) | type encoderFunc func(e *encodeState, v reflect.Value, opts encOpts) | ||||||
|  |  | ||||||
| var encoderCache sync.Map // map[reflect.Type]encoderFunc | var encoderCache sync.Map // map[reflect.Type]encoderFunc | ||||||
|  |  | ||||||
| func valueEncoder(v reflect.Value) encoderFunc { | func valueEncoder(v reflect.Value, tagkey string) encoderFunc { | ||||||
| 	if !v.IsValid() { | 	if !v.IsValid() { | ||||||
| 		return invalidValueEncoder | 		return invalidValueEncoder | ||||||
| 	} | 	} | ||||||
| 	return typeEncoder(v.Type()) | 	return typeEncoder(v.Type(), tagkey) | ||||||
| } | } | ||||||
|  |  | ||||||
| func typeEncoder(t reflect.Type) encoderFunc { | func typeEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||||
| 	if fi, ok := encoderCache.Load(t); ok { | 	if fi, ok := encoderCache.Load(t); ok { | ||||||
| 		return fi.(encoderFunc) | 		return fi.(encoderFunc) | ||||||
| 	} | 	} | ||||||
| @@ -429,7 +440,7 @@ func typeEncoder(t reflect.Type) encoderFunc { | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	// Compute the real encoder and replace the indirect func with it. | 	// Compute the real encoder and replace the indirect func with it. | ||||||
| 	f = newTypeEncoder(t, true) | 	f = newTypeEncoder(t, true, tagkey) | ||||||
| 	wg.Done() | 	wg.Done() | ||||||
| 	encoderCache.Store(t, f) | 	encoderCache.Store(t, f) | ||||||
| 	return f | 	return f | ||||||
| @@ -442,19 +453,19 @@ var ( | |||||||
|  |  | ||||||
| // newTypeEncoder constructs an encoderFunc for a type. | // newTypeEncoder constructs an encoderFunc for a type. | ||||||
| // The returned encoder only checks CanAddr when allowAddr is true. | // The returned encoder only checks CanAddr when allowAddr is true. | ||||||
| func newTypeEncoder(t reflect.Type, allowAddr bool) encoderFunc { | func newTypeEncoder(t reflect.Type, allowAddr bool, tagkey string) encoderFunc { | ||||||
| 	// If we have a non-pointer value whose type implements | 	// If we have a non-pointer value whose type implements | ||||||
| 	// Marshaler with a value receiver, then we're better off taking | 	// Marshaler with a value receiver, then we're better off taking | ||||||
| 	// the address of the value - otherwise we end up with an | 	// the address of the value - otherwise we end up with an | ||||||
| 	// allocation as we cast the value to an interface. | 	// allocation as we cast the value to an interface. | ||||||
| 	if t.Kind() != reflect.Pointer && allowAddr && reflect.PointerTo(t).Implements(marshalerType) { | 	if t.Kind() != reflect.Pointer && allowAddr && reflect.PointerTo(t).Implements(marshalerType) { | ||||||
| 		return newCondAddrEncoder(addrMarshalerEncoder, newTypeEncoder(t, false)) | 		return newCondAddrEncoder(addrMarshalerEncoder, newTypeEncoder(t, false, tagkey)) | ||||||
| 	} | 	} | ||||||
| 	if t.Implements(marshalerType) { | 	if t.Implements(marshalerType) { | ||||||
| 		return marshalerEncoder | 		return marshalerEncoder | ||||||
| 	} | 	} | ||||||
| 	if t.Kind() != reflect.Pointer && allowAddr && reflect.PointerTo(t).Implements(textMarshalerType) { | 	if t.Kind() != reflect.Pointer && allowAddr && reflect.PointerTo(t).Implements(textMarshalerType) { | ||||||
| 		return newCondAddrEncoder(addrTextMarshalerEncoder, newTypeEncoder(t, false)) | 		return newCondAddrEncoder(addrTextMarshalerEncoder, newTypeEncoder(t, false, tagkey)) | ||||||
| 	} | 	} | ||||||
| 	if t.Implements(textMarshalerType) { | 	if t.Implements(textMarshalerType) { | ||||||
| 		return textMarshalerEncoder | 		return textMarshalerEncoder | ||||||
| @@ -476,15 +487,15 @@ func newTypeEncoder(t reflect.Type, allowAddr bool) encoderFunc { | |||||||
| 	case reflect.Interface: | 	case reflect.Interface: | ||||||
| 		return interfaceEncoder | 		return interfaceEncoder | ||||||
| 	case reflect.Struct: | 	case reflect.Struct: | ||||||
| 		return newStructEncoder(t) | 		return newStructEncoder(t, tagkey) | ||||||
| 	case reflect.Map: | 	case reflect.Map: | ||||||
| 		return newMapEncoder(t) | 		return newMapEncoder(t, tagkey) | ||||||
| 	case reflect.Slice: | 	case reflect.Slice: | ||||||
| 		return newSliceEncoder(t) | 		return newSliceEncoder(t, tagkey) | ||||||
| 	case reflect.Array: | 	case reflect.Array: | ||||||
| 		return newArrayEncoder(t) | 		return newArrayEncoder(t, tagkey) | ||||||
| 	case reflect.Pointer: | 	case reflect.Pointer: | ||||||
| 		return newPtrEncoder(t) | 		return newPtrEncoder(t, tagkey) | ||||||
| 	default: | 	default: | ||||||
| 		return unsupportedTypeEncoder | 		return unsupportedTypeEncoder | ||||||
| 	} | 	} | ||||||
| @@ -777,6 +788,8 @@ FieldLoop: | |||||||
|  |  | ||||||
| 		if f.omitEmpty && isEmptyValue(fv) { | 		if f.omitEmpty && isEmptyValue(fv) { | ||||||
| 			continue | 			continue | ||||||
|  | 		} else if opts.filter != nil && len(f.jsonfilter) > 0 && !f.jsonfilter.Contains(*opts.filter) { | ||||||
|  | 			continue | ||||||
| 		} | 		} | ||||||
| 		e.WriteByte(next) | 		e.WriteByte(next) | ||||||
| 		next = ',' | 		next = ',' | ||||||
| @@ -795,8 +808,8 @@ FieldLoop: | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
| func newStructEncoder(t reflect.Type) encoderFunc { | func newStructEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||||
| 	se := structEncoder{fields: cachedTypeFields(t)} | 	se := structEncoder{fields: cachedTypeFields(t, tagkey)} | ||||||
| 	return se.encode | 	return se.encode | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -849,7 +862,7 @@ func (me mapEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | |||||||
| 	e.ptrLevel-- | 	e.ptrLevel-- | ||||||
| } | } | ||||||
|  |  | ||||||
| func newMapEncoder(t reflect.Type) encoderFunc { | func newMapEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||||
| 	switch t.Key().Kind() { | 	switch t.Key().Kind() { | ||||||
| 	case reflect.String, | 	case reflect.String, | ||||||
| 		reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, | 		reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, | ||||||
| @@ -859,7 +872,7 @@ func newMapEncoder(t reflect.Type) encoderFunc { | |||||||
| 			return unsupportedTypeEncoder | 			return unsupportedTypeEncoder | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| 	me := mapEncoder{typeEncoder(t.Elem())} | 	me := mapEncoder{typeEncoder(t.Elem(), tagkey)} | ||||||
| 	return me.encode | 	return me.encode | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -930,7 +943,7 @@ func (se sliceEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | |||||||
| 	e.ptrLevel-- | 	e.ptrLevel-- | ||||||
| } | } | ||||||
|  |  | ||||||
| func newSliceEncoder(t reflect.Type) encoderFunc { | func newSliceEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||||
| 	// Byte slices get special treatment; arrays don't. | 	// Byte slices get special treatment; arrays don't. | ||||||
| 	if t.Elem().Kind() == reflect.Uint8 { | 	if t.Elem().Kind() == reflect.Uint8 { | ||||||
| 		p := reflect.PointerTo(t.Elem()) | 		p := reflect.PointerTo(t.Elem()) | ||||||
| @@ -938,7 +951,7 @@ func newSliceEncoder(t reflect.Type) encoderFunc { | |||||||
| 			return encodeByteSlice | 			return encodeByteSlice | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| 	enc := sliceEncoder{newArrayEncoder(t)} | 	enc := sliceEncoder{newArrayEncoder(t, tagkey)} | ||||||
| 	return enc.encode | 	return enc.encode | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -958,8 +971,8 @@ func (ae arrayEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | |||||||
| 	e.WriteByte(']') | 	e.WriteByte(']') | ||||||
| } | } | ||||||
|  |  | ||||||
| func newArrayEncoder(t reflect.Type) encoderFunc { | func newArrayEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||||
| 	enc := arrayEncoder{typeEncoder(t.Elem())} | 	enc := arrayEncoder{typeEncoder(t.Elem(), tagkey)} | ||||||
| 	return enc.encode | 	return enc.encode | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -986,8 +999,8 @@ func (pe ptrEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | |||||||
| 	e.ptrLevel-- | 	e.ptrLevel-- | ||||||
| } | } | ||||||
|  |  | ||||||
| func newPtrEncoder(t reflect.Type) encoderFunc { | func newPtrEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||||
| 	enc := ptrEncoder{typeEncoder(t.Elem())} | 	enc := ptrEncoder{typeEncoder(t.Elem(), tagkey)} | ||||||
| 	return enc.encode | 	return enc.encode | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -1224,11 +1237,24 @@ type field struct { | |||||||
| 	index      []int | 	index      []int | ||||||
| 	typ        reflect.Type | 	typ        reflect.Type | ||||||
| 	omitEmpty  bool | 	omitEmpty  bool | ||||||
|  | 	jsonfilter jsonfilter | ||||||
| 	quoted     bool | 	quoted     bool | ||||||
|  |  | ||||||
| 	encoder encoderFunc | 	encoder encoderFunc | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // jsonfilter stores the value of the jsonfilter struct tag | ||||||
|  | type jsonfilter []string | ||||||
|  |  | ||||||
|  | func (j jsonfilter) Contains(t string) bool { | ||||||
|  | 	for _, tag := range j { | ||||||
|  | 		if t == tag { | ||||||
|  | 			return true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return false | ||||||
|  | } | ||||||
|  |  | ||||||
| // byIndex sorts field by index sequence. | // byIndex sorts field by index sequence. | ||||||
| type byIndex []field | type byIndex []field | ||||||
|  |  | ||||||
| @@ -1251,7 +1277,7 @@ func (x byIndex) Less(i, j int) bool { | |||||||
| // typeFields returns a list of fields that JSON should recognize for the given type. | // typeFields returns a list of fields that JSON should recognize for the given type. | ||||||
| // The algorithm is breadth-first search over the set of structs to include - the top struct | // The algorithm is breadth-first search over the set of structs to include - the top struct | ||||||
| // and then any reachable anonymous structs. | // and then any reachable anonymous structs. | ||||||
| func typeFields(t reflect.Type) structFields { | func typeFields(t reflect.Type, tagkey string) structFields { | ||||||
| 	// Anonymous fields to explore at the current level and the next. | 	// Anonymous fields to explore at the current level and the next. | ||||||
| 	current := []field{} | 	current := []field{} | ||||||
| 	next := []field{{typ: t}} | 	next := []field{{typ: t}} | ||||||
| @@ -1296,7 +1322,7 @@ func typeFields(t reflect.Type) structFields { | |||||||
| 					// Ignore unexported non-embedded fields. | 					// Ignore unexported non-embedded fields. | ||||||
| 					continue | 					continue | ||||||
| 				} | 				} | ||||||
| 				tag := sf.Tag.Get("json") | 				tag := sf.Tag.Get(tagkey) | ||||||
| 				if tag == "-" { | 				if tag == "-" { | ||||||
| 					continue | 					continue | ||||||
| 				} | 				} | ||||||
| @@ -1304,6 +1330,13 @@ func typeFields(t reflect.Type) structFields { | |||||||
| 				if !isValidTag(name) { | 				if !isValidTag(name) { | ||||||
| 					name = "" | 					name = "" | ||||||
| 				} | 				} | ||||||
|  |  | ||||||
|  | 				var jsonfilter []string | ||||||
|  | 				jsonfilterTag := sf.Tag.Get("jsonfilter") | ||||||
|  | 				if jsonfilterTag != "" && jsonfilterTag != "-" { | ||||||
|  | 					jsonfilter = strings.Split(jsonfilterTag, ",") | ||||||
|  | 				} | ||||||
|  |  | ||||||
| 				index := make([]int, len(f.index)+1) | 				index := make([]int, len(f.index)+1) | ||||||
| 				copy(index, f.index) | 				copy(index, f.index) | ||||||
| 				index[len(f.index)] = i | 				index[len(f.index)] = i | ||||||
| @@ -1339,6 +1372,7 @@ func typeFields(t reflect.Type) structFields { | |||||||
| 						index:      index, | 						index:      index, | ||||||
| 						typ:        ft, | 						typ:        ft, | ||||||
| 						omitEmpty:  opts.Contains("omitempty"), | 						omitEmpty:  opts.Contains("omitempty"), | ||||||
|  | 						jsonfilter: jsonfilter, | ||||||
| 						quoted:     quoted, | 						quoted:     quoted, | ||||||
| 					} | 					} | ||||||
| 					field.nameBytes = []byte(field.name) | 					field.nameBytes = []byte(field.name) | ||||||
| @@ -1422,7 +1456,7 @@ func typeFields(t reflect.Type) structFields { | |||||||
|  |  | ||||||
| 	for i := range fields { | 	for i := range fields { | ||||||
| 		f := &fields[i] | 		f := &fields[i] | ||||||
| 		f.encoder = typeEncoder(typeByIndex(t, f.index)) | 		f.encoder = typeEncoder(typeByIndex(t, f.index), tagkey) | ||||||
| 	} | 	} | ||||||
| 	nameIndex := make(map[string]int, len(fields)) | 	nameIndex := make(map[string]int, len(fields)) | ||||||
| 	for i, field := range fields { | 	for i, field := range fields { | ||||||
| @@ -1447,13 +1481,26 @@ func dominantField(fields []field) (field, bool) { | |||||||
| 	return fields[0], true | 	return fields[0], true | ||||||
| } | } | ||||||
|  |  | ||||||
| var fieldCache sync.Map // map[reflect.Type]structFields | var fieldCache sync.Map // map[string]map[reflect.Type]structFields | ||||||
|  |  | ||||||
| // cachedTypeFields is like typeFields but uses a cache to avoid repeated work. | // cachedTypeFields is like typeFields but uses a cache to avoid repeated work. | ||||||
| func cachedTypeFields(t reflect.Type) structFields { | func cachedTypeFields(t reflect.Type, tagkey string) structFields { | ||||||
| 	if f, ok := fieldCache.Load(t); ok { | 	if m0, ok := fieldCache.Load(tagkey); ok { | ||||||
|  |  | ||||||
|  | 		if f, ok := m0.(*sync.Map).Load(t); ok { | ||||||
| 			return f.(structFields) | 			return f.(structFields) | ||||||
| 		} | 		} | ||||||
| 	f, _ := fieldCache.LoadOrStore(t, typeFields(t)) | 		f, _ := m0.(*sync.Map).LoadOrStore(t, typeFields(t, tagkey)) | ||||||
| 		return f.(structFields) | 		return f.(structFields) | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		m0 := &sync.Map{} | ||||||
|  | 		f, _ := m0.LoadOrStore(t, typeFields(t, tagkey)) | ||||||
|  |  | ||||||
|  | 		fieldCache.Store(tagkey, m0) | ||||||
|  |  | ||||||
|  | 		return f.(structFields) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1253,6 +1253,10 @@ func TestMarshalSafeCollections(t *testing.T) { | |||||||
| 		nilMapStruct struct { | 		nilMapStruct struct { | ||||||
| 			NilMap map[string]interface{} `json:"nil_map"` | 			NilMap map[string]interface{} `json:"nil_map"` | ||||||
| 		} | 		} | ||||||
|  | 		testWithFilter struct { | ||||||
|  | 			Test1 string `json:"test1" jsonfilter:"FILTERONE"` | ||||||
|  | 			Test2 string `json:"test2" jsonfilter:"FILTERTWO"` | ||||||
|  | 		} | ||||||
| 	) | 	) | ||||||
|  |  | ||||||
| 	tests := []struct { | 	tests := []struct { | ||||||
| @@ -1271,10 +1275,12 @@ func TestMarshalSafeCollections(t *testing.T) { | |||||||
| 		{map[string]interface{}{"1": 1, "2": 2, "3": 3}, "{\"1\":1,\"2\":2,\"3\":3}"}, | 		{map[string]interface{}{"1": 1, "2": 2, "3": 3}, "{\"1\":1,\"2\":2,\"3\":3}"}, | ||||||
| 		{pNilMap, "null"}, | 		{pNilMap, "null"}, | ||||||
| 		{nilMapStruct{}, "{\"nil_map\":{}}"}, | 		{nilMapStruct{}, "{\"nil_map\":{}}"}, | ||||||
|  | 		{testWithFilter{}, "{\"test1\":\"\"}"}, | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	filter := "FILTERONE" | ||||||
| 	for i, tt := range tests { | 	for i, tt := range tests { | ||||||
| 		b, err := MarshalSafeCollections(tt.in, true, true, nil) | 		b, err := MarshalSafeCollections(tt.in, true, true, nil, &filter) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			t.Errorf("test %d, unexpected failure: %v", i, err) | 			t.Errorf("test %d, unexpected failure: %v", i, err) | ||||||
| 		} | 		} | ||||||
|   | |||||||
| @@ -97,7 +97,10 @@ func equalFoldRight(s, t []byte) bool { | |||||||
| 		t = t[size:] | 		t = t[size:] | ||||||
|  |  | ||||||
| 	} | 	} | ||||||
| 	return len(t) == 0 | 	if len(t) > 0 { | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  | 	return true | ||||||
| } | } | ||||||
|  |  | ||||||
| // asciiEqualFold is a specialization of bytes.EqualFold for use when | // asciiEqualFold is a specialization of bytes.EqualFold for use when | ||||||
|   | |||||||
| @@ -52,7 +52,9 @@ func TestFold(t *testing.T) { | |||||||
| } | } | ||||||
|  |  | ||||||
| func TestFoldAgainstUnicode(t *testing.T) { | func TestFoldAgainstUnicode(t *testing.T) { | ||||||
| 	var buf1, buf2 []byte | 	const bufSize = 5 | ||||||
|  | 	buf1 := make([]byte, 0, bufSize) | ||||||
|  | 	buf2 := make([]byte, 0, bufSize) | ||||||
| 	var runes []rune | 	var runes []rune | ||||||
| 	for i := 0x20; i <= 0x7f; i++ { | 	for i := 0x20; i <= 0x7f; i++ { | ||||||
| 		runes = append(runes, rune(i)) | 		runes = append(runes, rune(i)) | ||||||
| @@ -94,8 +96,12 @@ func TestFoldAgainstUnicode(t *testing.T) { | |||||||
| 				continue | 				continue | ||||||
| 			} | 			} | ||||||
| 			for _, r2 := range runes { | 			for _, r2 := range runes { | ||||||
| 				buf1 = append(utf8.AppendRune(append(buf1[:0], 'x'), r), 'x') | 				buf1 := append(buf1[:0], 'x') | ||||||
| 				buf2 = append(utf8.AppendRune(append(buf2[:0], 'x'), r2), 'x') | 				buf2 := append(buf2[:0], 'x') | ||||||
|  | 				buf1 = buf1[:1+utf8.EncodeRune(buf1[1:bufSize], r)] | ||||||
|  | 				buf2 = buf2[:1+utf8.EncodeRune(buf2[1:bufSize], r2)] | ||||||
|  | 				buf1 = append(buf1, 'x') | ||||||
|  | 				buf2 = append(buf2, 'x') | ||||||
| 				want := bytes.EqualFold(buf1, buf2) | 				want := bytes.EqualFold(buf1, buf2) | ||||||
| 				if got := ff.fold(buf1, buf2); got != want { | 				if got := ff.fold(buf1, buf2); got != want { | ||||||
| 					t.Errorf("%s(%q, %q) = %v; want %v", ff.name, buf1, buf2, got, want) | 					t.Errorf("%s(%q, %q) = %v; want %v", ff.name, buf1, buf2, got, want) | ||||||
|   | |||||||
| @@ -17,6 +17,7 @@ type GoJsonRender struct { | |||||||
| 	NilSafeSlices bool | 	NilSafeSlices bool | ||||||
| 	NilSafeMaps   bool | 	NilSafeMaps   bool | ||||||
| 	Indent        *IndentOpt | 	Indent        *IndentOpt | ||||||
|  | 	Filter        *string | ||||||
| } | } | ||||||
|  |  | ||||||
| func (r GoJsonRender) Render(w http.ResponseWriter) error { | func (r GoJsonRender) Render(w http.ResponseWriter) error { | ||||||
| @@ -25,7 +26,7 @@ func (r GoJsonRender) Render(w http.ResponseWriter) error { | |||||||
| 		header["Content-Type"] = []string{"application/json; charset=utf-8"} | 		header["Content-Type"] = []string{"application/json; charset=utf-8"} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent) | 	jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent, r.Filter) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		panic(err) | 		panic(err) | ||||||
| 	} | 	} | ||||||
| @@ -36,6 +37,14 @@ func (r GoJsonRender) Render(w http.ResponseWriter) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (r GoJsonRender) RenderString() (string, error) { | ||||||
|  | 	jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent, r.Filter) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
|  | 	} | ||||||
|  | 	return string(jsonBytes), nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func (r GoJsonRender) WriteContentType(w http.ResponseWriter) { | func (r GoJsonRender) WriteContentType(w http.ResponseWriter) { | ||||||
| 	header := w.Header() | 	header := w.Header() | ||||||
| 	if val := header["Content-Type"]; len(val) == 0 { | 	if val := header["Content-Type"]; len(val) == 0 { | ||||||
|   | |||||||
| @@ -116,3 +116,18 @@ func TestNumberIsValid(t *testing.T) { | |||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func BenchmarkNumberIsValid(b *testing.B) { | ||||||
|  | 	s := "-61657.61667E+61673" | ||||||
|  | 	for i := 0; i < b.N; i++ { | ||||||
|  | 		isValidNumber(s) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func BenchmarkNumberIsValidRegexp(b *testing.B) { | ||||||
|  | 	var jsonNumberRegexp = regexp.MustCompile(`^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$`) | ||||||
|  | 	s := "-61657.61667E+61673" | ||||||
|  | 	for i := 0; i < b.N; i++ { | ||||||
|  | 		jsonNumberRegexp.MatchString(s) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|   | |||||||
| @@ -594,7 +594,7 @@ func (s *scanner) error(c byte, context string) int { | |||||||
| 	return scanError | 	return scanError | ||||||
| } | } | ||||||
|  |  | ||||||
| // quoteChar formats c as a quoted character literal. | // quoteChar formats c as a quoted character literal | ||||||
| func quoteChar(c byte) string { | func quoteChar(c byte) string { | ||||||
| 	// special cases - different from quoted strings | 	// special cases - different from quoted strings | ||||||
| 	if c == '\'' { | 	if c == '\'' { | ||||||
|   | |||||||
| @@ -41,6 +41,9 @@ func (dec *Decoder) UseNumber() { dec.d.useNumber = true } | |||||||
| // non-ignored, exported fields in the destination. | // non-ignored, exported fields in the destination. | ||||||
| func (dec *Decoder) DisallowUnknownFields() { dec.d.disallowUnknownFields = true } | func (dec *Decoder) DisallowUnknownFields() { dec.d.disallowUnknownFields = true } | ||||||
|  |  | ||||||
|  | // TagKey sets a different TagKey (instead of "json") | ||||||
|  | func (dec *Decoder) TagKey(v string) { dec.d.tagkey = &v } | ||||||
|  |  | ||||||
| // Decode reads the next JSON-encoded value from its | // Decode reads the next JSON-encoded value from its | ||||||
| // input and stores it in the value pointed to by v. | // input and stores it in the value pointed to by v. | ||||||
| // | // | ||||||
| @@ -182,8 +185,6 @@ type Encoder struct { | |||||||
| 	w          io.Writer | 	w          io.Writer | ||||||
| 	err        error | 	err        error | ||||||
| 	escapeHTML bool | 	escapeHTML bool | ||||||
| 	nilSafeSlices bool |  | ||||||
| 	nilSafeMaps   bool |  | ||||||
|  |  | ||||||
| 	indentBuf    *bytes.Buffer | 	indentBuf    *bytes.Buffer | ||||||
| 	indentPrefix string | 	indentPrefix string | ||||||
| @@ -204,11 +205,8 @@ func (enc *Encoder) Encode(v any) error { | |||||||
| 	if enc.err != nil { | 	if enc.err != nil { | ||||||
| 		return enc.err | 		return enc.err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	e := newEncodeState() | 	e := newEncodeState() | ||||||
| 	defer encodeStatePool.Put(e) | 	err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML}) | ||||||
|  |  | ||||||
| 	err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML, nilSafeMaps: enc.nilSafeMaps, nilSafeSlices: enc.nilSafeSlices}) |  | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
| @@ -236,6 +234,7 @@ func (enc *Encoder) Encode(v any) error { | |||||||
| 	if _, err = enc.w.Write(b); err != nil { | 	if _, err = enc.w.Write(b); err != nil { | ||||||
| 		enc.err = err | 		enc.err = err | ||||||
| 	} | 	} | ||||||
|  | 	encodeStatePool.Put(e) | ||||||
| 	return err | 	return err | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -247,13 +246,6 @@ func (enc *Encoder) SetIndent(prefix, indent string) { | |||||||
| 	enc.indentValue = indent | 	enc.indentValue = indent | ||||||
| } | } | ||||||
|  |  | ||||||
| // SetNilSafeCollection specifies whether to represent nil slices and maps as |  | ||||||
| // '[]' or '{}' respectfully (flag on) instead of 'null' (default) when marshaling json. |  | ||||||
| func (enc *Encoder) SetNilSafeCollection(nilSafeSlices bool, nilSafeMaps bool) { |  | ||||||
| 	enc.nilSafeSlices = nilSafeSlices |  | ||||||
| 	enc.nilSafeMaps = nilSafeMaps |  | ||||||
| } |  | ||||||
|  |  | ||||||
| // SetEscapeHTML specifies whether problematic HTML characters | // SetEscapeHTML specifies whether problematic HTML characters | ||||||
| // should be escaped inside JSON quoted strings. | // should be escaped inside JSON quoted strings. | ||||||
| // The default behavior is to escape &, <, and > to \u0026, \u003c, and \u003e | // The default behavior is to escape &, <, and > to \u0026, \u003c, and \u003e | ||||||
|   | |||||||
| @@ -12,7 +12,6 @@ import ( | |||||||
| 	"net/http" | 	"net/http" | ||||||
| 	"net/http/httptest" | 	"net/http/httptest" | ||||||
| 	"reflect" | 	"reflect" | ||||||
| 	"runtime/debug" |  | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"testing" | 	"testing" | ||||||
| ) | ) | ||||||
| @@ -42,7 +41,7 @@ false | |||||||
|  |  | ||||||
| func TestEncoder(t *testing.T) { | func TestEncoder(t *testing.T) { | ||||||
| 	for i := 0; i <= len(streamTest); i++ { | 	for i := 0; i <= len(streamTest); i++ { | ||||||
| 		var buf strings.Builder | 		var buf bytes.Buffer | ||||||
| 		enc := NewEncoder(&buf) | 		enc := NewEncoder(&buf) | ||||||
| 		// Check that enc.SetIndent("", "") turns off indentation. | 		// Check that enc.SetIndent("", "") turns off indentation. | ||||||
| 		enc.SetIndent(">", ".") | 		enc.SetIndent(">", ".") | ||||||
| @@ -60,43 +59,6 @@ func TestEncoder(t *testing.T) { | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
| func TestEncoderErrorAndReuseEncodeState(t *testing.T) { |  | ||||||
| 	// Disable the GC temporarily to prevent encodeState's in Pool being cleaned away during the test. |  | ||||||
| 	percent := debug.SetGCPercent(-1) |  | ||||||
| 	defer debug.SetGCPercent(percent) |  | ||||||
|  |  | ||||||
| 	// Trigger an error in Marshal with cyclic data. |  | ||||||
| 	type Dummy struct { |  | ||||||
| 		Name string |  | ||||||
| 		Next *Dummy |  | ||||||
| 	} |  | ||||||
| 	dummy := Dummy{Name: "Dummy"} |  | ||||||
| 	dummy.Next = &dummy |  | ||||||
|  |  | ||||||
| 	var buf bytes.Buffer |  | ||||||
| 	enc := NewEncoder(&buf) |  | ||||||
| 	if err := enc.Encode(dummy); err == nil { |  | ||||||
| 		t.Errorf("Encode(dummy) == nil; want error") |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	type Data struct { |  | ||||||
| 		A string |  | ||||||
| 		I int |  | ||||||
| 	} |  | ||||||
| 	data := Data{A: "a", I: 1} |  | ||||||
| 	if err := enc.Encode(data); err != nil { |  | ||||||
| 		t.Errorf("Marshal(%v) = %v", data, err) |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	var data2 Data |  | ||||||
| 	if err := Unmarshal(buf.Bytes(), &data2); err != nil { |  | ||||||
| 		t.Errorf("Unmarshal(%v) = %v", data2, err) |  | ||||||
| 	} |  | ||||||
| 	if data2 != data { |  | ||||||
| 		t.Errorf("expect: %v, but get: %v", data, data2) |  | ||||||
| 	} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| var streamEncodedIndent = `0.1 | var streamEncodedIndent = `0.1 | ||||||
| "hello" | "hello" | ||||||
| null | null | ||||||
| @@ -115,7 +77,7 @@ false | |||||||
| ` | ` | ||||||
|  |  | ||||||
| func TestEncoderIndent(t *testing.T) { | func TestEncoderIndent(t *testing.T) { | ||||||
| 	var buf strings.Builder | 	var buf bytes.Buffer | ||||||
| 	enc := NewEncoder(&buf) | 	enc := NewEncoder(&buf) | ||||||
| 	enc.SetIndent(">", ".") | 	enc.SetIndent(">", ".") | ||||||
| 	for _, v := range streamTest { | 	for _, v := range streamTest { | ||||||
| @@ -185,7 +147,7 @@ func TestEncoderSetEscapeHTML(t *testing.T) { | |||||||
| 			`{"bar":"\"<html>foobar</html>\""}`, | 			`{"bar":"\"<html>foobar</html>\""}`, | ||||||
| 		}, | 		}, | ||||||
| 	} { | 	} { | ||||||
| 		var buf strings.Builder | 		var buf bytes.Buffer | ||||||
| 		enc := NewEncoder(&buf) | 		enc := NewEncoder(&buf) | ||||||
| 		if err := enc.Encode(tt.v); err != nil { | 		if err := enc.Encode(tt.v); err != nil { | ||||||
| 			t.Errorf("Encode(%s): %s", tt.name, err) | 			t.Errorf("Encode(%s): %s", tt.name, err) | ||||||
| @@ -347,6 +309,21 @@ func TestBlocking(t *testing.T) { | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func BenchmarkEncoderEncode(b *testing.B) { | ||||||
|  | 	b.ReportAllocs() | ||||||
|  | 	type T struct { | ||||||
|  | 		X, Y string | ||||||
|  | 	} | ||||||
|  | 	v := &T{"foo", "bar"} | ||||||
|  | 	b.RunParallel(func(pb *testing.PB) { | ||||||
|  | 		for pb.Next() { | ||||||
|  | 			if err := NewEncoder(io.Discard).Encode(v); err != nil { | ||||||
|  | 				b.Fatal(err) | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 	}) | ||||||
|  | } | ||||||
|  |  | ||||||
| type tokenStreamCase struct { | type tokenStreamCase struct { | ||||||
| 	json      string | 	json      string | ||||||
| 	expTokens []any | 	expTokens []any | ||||||
| @@ -495,45 +472,3 @@ func TestHTTPDecoding(t *testing.T) { | |||||||
| 		t.Errorf("err = %v; want io.EOF", err) | 		t.Errorf("err = %v; want io.EOF", err) | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
| func TestEncoderSetNilSafeCollection(t *testing.T) { |  | ||||||
| 	var ( |  | ||||||
| 		nilSlice  []interface{} |  | ||||||
| 		pNilSlice *[]interface{} |  | ||||||
| 		nilMap    map[string]interface{} |  | ||||||
| 		pNilMap   *map[string]interface{} |  | ||||||
| 	) |  | ||||||
| 	for _, tt := range []struct { |  | ||||||
| 		name        string |  | ||||||
| 		v           interface{} |  | ||||||
| 		want        string |  | ||||||
| 		rescuedWant string |  | ||||||
| 	}{ |  | ||||||
| 		{"nilSlice", nilSlice, "null", "[]"}, |  | ||||||
| 		{"nonNilSlice", []interface{}{}, "[]", "[]"}, |  | ||||||
| 		{"sliceWithValues", []interface{}{1, 2, 3}, "[1,2,3]", "[1,2,3]"}, |  | ||||||
| 		{"pNilSlice", pNilSlice, "null", "null"}, |  | ||||||
| 		{"nilMap", nilMap, "null", "{}"}, |  | ||||||
| 		{"nonNilMap", map[string]interface{}{}, "{}", "{}"}, |  | ||||||
| 		{"mapWithValues", map[string]interface{}{"1": 1, "2": 2, "3": 3}, "{\"1\":1,\"2\":2,\"3\":3}", "{\"1\":1,\"2\":2,\"3\":3}"}, |  | ||||||
| 		{"pNilMap", pNilMap, "null", "null"}, |  | ||||||
| 	} { |  | ||||||
| 		var buf bytes.Buffer |  | ||||||
| 		enc := NewEncoder(&buf) |  | ||||||
| 		if err := enc.Encode(tt.v); err != nil { |  | ||||||
| 			t.Fatalf("Encode(%s): %s", tt.name, err) |  | ||||||
| 		} |  | ||||||
| 		if got := strings.TrimSpace(buf.String()); got != tt.want { |  | ||||||
| 			t.Errorf("Encode(%s) = %#q, want %#q", tt.name, got, tt.want) |  | ||||||
| 		} |  | ||||||
| 		buf.Reset() |  | ||||||
| 		enc.SetNilSafeCollection(true, true) |  | ||||||
| 		if err := enc.Encode(tt.v); err != nil { |  | ||||||
| 			t.Fatalf("SetNilSafeCollection(true) Encode(%s): %s", tt.name, err) |  | ||||||
| 		} |  | ||||||
| 		if got := strings.TrimSpace(buf.String()); got != tt.rescuedWant { |  | ||||||
| 			t.Errorf("SetNilSafeCollection(true) Encode(%s) = %#q, want %#q", |  | ||||||
| 				tt.name, got, tt.want) |  | ||||||
| 		} |  | ||||||
| 	} |  | ||||||
| } |  | ||||||
|   | |||||||
							
								
								
									
										54
									
								
								googleapi/README.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										54
									
								
								googleapi/README.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,54 @@ | |||||||
|  |  | ||||||
|  | Google OAuth Setup (to send mails) | ||||||
|  | ================================== | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  - Login @ https://console.cloud.google.com | ||||||
|  |  | ||||||
|  |  - GMail API aktivieren: https://console.cloud.google.com/apis/library/gmail.googleapis.com? | ||||||
|  |  | ||||||
|  |  - Create new Project (aka 'BackendMailAPI') @ https://console.cloud.google.com/projectcreate | ||||||
|  |    User Type: Intern | ||||||
|  |    Anwendungsname: 'BackendMailAPI' | ||||||
|  |    Support-Email: ... | ||||||
|  |    Authorisierte Domains: 'heydyno.de' (or project domain) | ||||||
|  |    Kontakt-Email: ... | ||||||
|  |     | ||||||
|  |  | ||||||
|  |  - Unter "Anmeldedaten" neuer OAuth Client erstellen @ https://console.cloud.google.com/apis/credentials | ||||||
|  |    Anwendungstyp: Web | ||||||
|  |    Name: 'BackendMailOAuth' | ||||||
|  |    Redirect-Uri: 'http://localhost/oauth' | ||||||
|  |    Client-ID und Client-Key merken | ||||||
|  |  | ||||||
|  |  - Open in Browser: | ||||||
|  |    https://accounts.google.com/o/oauth2/v2/auth?redirect_uri=http://localhost/oauth&prompt=consent&response_type=code&client_id={...}&scope=https://www.googleapis.com/auth/gmail.send&access_type=offline | ||||||
|  |    Code aus redirected URI merken | ||||||
|  |  | ||||||
|  |  - Code via request einlösen (und refresh_token merken): | ||||||
|  |  | ||||||
|  | ``` | ||||||
|  | curl --request POST \ | ||||||
|  |   --url https://oauth2.googleapis.com/token \ | ||||||
|  |   --data code={...} \ | ||||||
|  |   --data redirect_uri=http://localhost/oauth \ | ||||||
|  |   --data client_id={...} \ | ||||||
|  |   --data client_secret={...} \ | ||||||
|  |   --data grant_type=authorization_code \ | ||||||
|  |   --data scope=https://www.googleapis.com/auth/gmail.send | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  |  - Fertig, mit `client_id`, `client_secret` und `refresh_token` kann das package benutzt werden | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
							
								
								
									
										46
									
								
								googleapi/attachment.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										46
									
								
								googleapi/attachment.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,46 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"encoding/base64" | ||||||
|  | 	"fmt" | ||||||
|  | ) | ||||||
|  |  | ||||||
// MailAttachment is a single file attached to a mail. Inline attachments
// become part of the multipart/related section (e.g. images referenced from
// the HTML body); non-inline ones are listed as downloadable files.
type MailAttachment struct {
	IsInline    bool
	ContentType string // MIME type; header is omitted when empty
	Filename    string
	Data        []byte
}

// dump renders the attachment as the lines of a single MIME part: headers,
// an empty separator line, then the payload base64-encoded in 80-character
// chunks. The surrounding multipart boundaries are written by the caller.
func (a MailAttachment) dump() []string {
	res := make([]string, 0, 8)

	if a.ContentType != "" {
		res = append(res, "Content-Type: "+a.ContentType+"; charset=UTF-8")
	}

	res = append(res, "Content-Transfer-Encoding: base64")

	disposition := "attachment"
	if a.IsInline {
		disposition = "inline"
	}
	if a.Filename != "" {
		res = append(res, fmt.Sprintf("Content-Disposition: %s;filename=\"%s\"", disposition, a.Filename))
	} else {
		res = append(res, "Content-Disposition: "+disposition)
	}

	// RFC 2045: an empty line separates the part headers from the body
	// (was missing, so the base64 data would be parsed as further headers).
	res = append(res, "")

	b64 := base64.StdEncoding.EncodeToString(a.Data)
	for i := 0; i < len(b64); i += 80 {
		res = append(res, b64[i:min(i+80, len(b64))])
	}

	// removed: `res = append(res)` - a no-op statement

	return res
}
							
								
								
									
										6
									
								
								googleapi/body.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								googleapi/body.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
// MailBody holds the textual content of a mail; either or both of the
// plain-text and HTML variants may be set.
type MailBody struct {
	Plain string // text/plain variant ("" = absent)
	HTML  string // text/html variant ("" = absent)
}
							
								
								
									
										224
									
								
								googleapi/mimeMessage.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										224
									
								
								googleapi/mimeMessage.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,224 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"mime" | ||||||
|  | 	"strings" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | // https://datatracker.ietf.org/doc/html/rfc2822 | ||||||
|  | func encodeMimeMail(from string, recipients []string, cc []string, bcc []string, subject string, body MailBody, attachments []MailAttachment) string { | ||||||
|  |  | ||||||
|  | 	data := make([]string, 0, 32) | ||||||
|  |  | ||||||
|  | 	data = append(data, "Date: "+time.Now().Format(time.RFC1123Z)) | ||||||
|  | 	data = append(data, "MIME-Version: 1.0") | ||||||
|  | 	data = append(data, "From: "+mime.QEncoding.Encode("UTF-8", from)) | ||||||
|  | 	data = append(data, "To: "+strings.Join(langext.ArrMap(recipients, func(v string) string { return mime.QEncoding.Encode("UTF-8", v) }), ", ")) | ||||||
|  | 	if len(cc) > 0 { | ||||||
|  | 		data = append(data, "To: "+strings.Join(langext.ArrMap(cc, func(v string) string { return mime.QEncoding.Encode("UTF-8", v) }), ", ")) | ||||||
|  | 	} | ||||||
|  | 	if len(bcc) > 0 { | ||||||
|  | 		data = append(data, "Bcc: "+strings.Join(langext.ArrMap(bcc, func(v string) string { return mime.QEncoding.Encode("UTF-8", v) }), ", ")) | ||||||
|  | 	} | ||||||
|  | 	data = append(data, "Subject: "+mime.QEncoding.Encode("UTF-8", subject)) | ||||||
|  |  | ||||||
|  | 	hasInlineAttachments := langext.ArrAny(attachments, func(v MailAttachment) bool { return v.IsInline }) | ||||||
|  | 	hasNormalAttachments := langext.ArrAny(attachments, func(v MailAttachment) bool { return !v.IsInline }) | ||||||
|  | 	hasPlain := body.Plain != "" | ||||||
|  | 	hasHTML := body.HTML != "" | ||||||
|  |  | ||||||
|  | 	mixedBoundary := langext.MustRawHexUUID() | ||||||
|  | 	relatedBoundary := langext.MustRawHexUUID() | ||||||
|  | 	altBoundary := langext.MustRawHexUUID() | ||||||
|  |  | ||||||
|  | 	inlineAttachments := langext.ArrFilter(attachments, func(v MailAttachment) bool { return v.IsInline }) | ||||||
|  | 	normalAttachments := langext.ArrFilter(attachments, func(v MailAttachment) bool { return !v.IsInline }) | ||||||
|  |  | ||||||
|  | 	if hasInlineAttachments && hasNormalAttachments { | ||||||
|  | 		// "mixed+related" | ||||||
|  |  | ||||||
|  | 		data = append(data, "Content-Type: multipart/mixed; boundary="+mixedBoundary) | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, "--"+mixedBoundary) | ||||||
|  |  | ||||||
|  | 		data = append(data, "Content-Type: multipart/related; boundary="+relatedBoundary) | ||||||
|  | 		data = append(data, "") | ||||||
|  |  | ||||||
|  | 		data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, relatedBoundary, altBoundary)...) | ||||||
|  | 		data = append(data, "") | ||||||
|  |  | ||||||
|  | 		for i, attachment := range inlineAttachments { | ||||||
|  | 			data = append(data, "--"+relatedBoundary) | ||||||
|  | 			data = append(data, attachment.dump()...) | ||||||
|  |  | ||||||
|  | 			if i < len(inlineAttachments)-1 { | ||||||
|  | 				data = append(data, "") | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		data = append(data, "--"+relatedBoundary+"--") | ||||||
|  |  | ||||||
|  | 		for i, attachment := range normalAttachments { | ||||||
|  | 			data = append(data, "--"+mixedBoundary) | ||||||
|  | 			data = append(data, attachment.dump()...) | ||||||
|  |  | ||||||
|  | 			if i < len(normalAttachments)-1 { | ||||||
|  | 				data = append(data, "") | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		data = append(data, "--"+mixedBoundary+"--") | ||||||
|  |  | ||||||
|  | 	} else if hasNormalAttachments { | ||||||
|  | 		// "mixed" | ||||||
|  |  | ||||||
|  | 		data = append(data, "Content-Type: multipart/mixed; boundary="+mixedBoundary) | ||||||
|  | 		data = append(data, "") | ||||||
|  |  | ||||||
|  | 		data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, mixedBoundary, altBoundary)...) | ||||||
|  | 		if hasPlain && hasHTML { | ||||||
|  | 			data = append(data, "") | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		for i, attachment := range normalAttachments { | ||||||
|  | 			data = append(data, "--"+mixedBoundary) | ||||||
|  | 			data = append(data, attachment.dump()...) | ||||||
|  |  | ||||||
|  | 			if i < len(normalAttachments)-1 { | ||||||
|  | 				data = append(data, "") | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		data = append(data, "--"+mixedBoundary+"--") | ||||||
|  |  | ||||||
|  | 	} else if hasInlineAttachments { | ||||||
|  | 		// "related" | ||||||
|  |  | ||||||
|  | 		data = append(data, "Content-Type: multipart/related; boundary="+relatedBoundary) | ||||||
|  | 		data = append(data, "") | ||||||
|  |  | ||||||
|  | 		data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, relatedBoundary, altBoundary)...) | ||||||
|  | 		data = append(data, "") | ||||||
|  |  | ||||||
|  | 		for i, attachment := range inlineAttachments { | ||||||
|  | 			data = append(data, "--"+relatedBoundary) | ||||||
|  | 			data = append(data, attachment.dump()...) | ||||||
|  |  | ||||||
|  | 			if i < len(inlineAttachments)-1 { | ||||||
|  | 				data = append(data, "") | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		data = append(data, "--"+relatedBoundary+"--") | ||||||
|  |  | ||||||
|  | 	} else if hasPlain && hasHTML { | ||||||
|  | 		// "alternative" | ||||||
|  |  | ||||||
|  | 		data = append(data, "Content-Type: multipart/alternative; boundary="+altBoundary) | ||||||
|  | 		data = append(data, "") | ||||||
|  |  | ||||||
|  | 		data = append(data, dumpMailBody(body, hasInlineAttachments, hasNormalAttachments, altBoundary, altBoundary)...) | ||||||
|  | 		data = append(data, "") | ||||||
|  |  | ||||||
|  | 		data = append(data, "--"+altBoundary+"--") | ||||||
|  |  | ||||||
|  | 	} else if hasPlain { | ||||||
|  | 		// "plain" | ||||||
|  |  | ||||||
|  | 		data = append(data, "Content-Type: text/plain; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.Plain) | ||||||
|  |  | ||||||
|  | 	} else if hasHTML { | ||||||
|  | 		// "plain" | ||||||
|  |  | ||||||
|  | 		data = append(data, "Content-Type: text/html; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.HTML) | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  | 		// "empty??" | ||||||
|  |  | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return strings.Join(data, "\r\n") | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func dumpMailBody(body MailBody, hasInlineAttachments bool, hasNormalAttachments bool, boundary string, boundaryAlt string) []string { | ||||||
|  |  | ||||||
|  | 	if body.HTML != "" && body.Plain != "" && !hasInlineAttachments && hasNormalAttachments { | ||||||
|  | 		data := make([]string, 0, 16) | ||||||
|  | 		data = append(data, "--"+boundary) | ||||||
|  | 		data = append(data, "Content-Type: multipart/alternative; boundary="+boundaryAlt) | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, "--"+boundaryAlt) | ||||||
|  | 		data = append(data, "Content-Type: text/plain; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.Plain) | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, "--"+boundaryAlt) | ||||||
|  | 		data = append(data, "Content-Type: text/html; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.HTML) | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, "--"+boundaryAlt+"--") | ||||||
|  | 		return data | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if body.HTML != "" && body.Plain != "" && hasInlineAttachments { | ||||||
|  | 		data := make([]string, 0, 2) | ||||||
|  | 		data = append(data, "--"+boundary) | ||||||
|  | 		data = append(data, body.HTML) | ||||||
|  | 		return data | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if body.HTML != "" && body.Plain != "" { | ||||||
|  | 		data := make([]string, 0, 8) | ||||||
|  | 		data = append(data, "--"+boundary) | ||||||
|  | 		data = append(data, "Content-Type: text/plain; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.Plain) | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, "--"+boundary) | ||||||
|  | 		data = append(data, "Content-Type: text/html; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.HTML) | ||||||
|  | 		return data | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if body.HTML != "" { | ||||||
|  | 		data := make([]string, 0, 2) | ||||||
|  | 		data = append(data, "--"+boundary) | ||||||
|  | 		data = append(data, "Content-Type: text/html; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.HTML) | ||||||
|  | 		return data | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if body.Plain != "" { | ||||||
|  | 		data := make([]string, 0, 2) | ||||||
|  | 		data = append(data, "--"+boundary) | ||||||
|  | 		data = append(data, "Content-Type: text/plain; charset=UTF-8") | ||||||
|  | 		data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 		data = append(data, "") | ||||||
|  | 		data = append(data, body.Plain) | ||||||
|  | 		return data | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	data := make([]string, 0, 16) | ||||||
|  | 	data = append(data, "--"+boundary) | ||||||
|  | 	data = append(data, "Content-Type: text/plain; charset=UTF-8") | ||||||
|  | 	data = append(data, "Content-Transfer-Encoding: 7bit") | ||||||
|  | 	data = append(data, "") | ||||||
|  | 	data = append(data, "") // no content ?!? | ||||||
|  | 	return data | ||||||
|  | } | ||||||
							
								
								
									
										80
									
								
								googleapi/mimeMessage_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										80
									
								
								googleapi/mimeMessage_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,80 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"os" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
// TestEncodeMimeMail builds a plain-text-only mail and runs it through the
// (currently no-op) verifyMime check.
func TestEncodeMimeMail(t *testing.T) {

	mail := encodeMimeMail(
		"noreply@heydyno.de",
		[]string{"trash@mikescher.de"},
		nil,
		nil,
		"Hello Test Mail",
		MailBody{Plain: "Plain Text"},
		nil)

	verifyMime(mail)
}
|  |  | ||||||
// TestEncodeMimeMail2 builds a mail with both plain and HTML variants
// (multipart/alternative layout) and runs it through the (currently no-op)
// verifyMime check.
func TestEncodeMimeMail2(t *testing.T) {

	mail := encodeMimeMail(
		"noreply@heydyno.de",
		[]string{"trash@mikescher.de"},
		nil,
		nil,
		"Hello Test Mail (alternative)",
		MailBody{
			Plain: "Plain Text",
			HTML:  "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
		},
		nil)

	verifyMime(mail)
}
|  |  | ||||||
// TestEncodeMimeMail3 builds an HTML mail with one regular (non-inline)
// text attachment and runs it through the (currently no-op) verifyMime check.
func TestEncodeMimeMail3(t *testing.T) {

	mail := encodeMimeMail(
		"noreply@heydyno.de",
		[]string{"trash@mikescher.de"},
		nil,
		nil,
		"Hello Test Mail (alternative)",
		MailBody{
			HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
		},
		[]MailAttachment{
			{Data: []byte("HelloWorld"), Filename: "test.txt", IsInline: false, ContentType: "text/plain"},
		})

	verifyMime(mail)
}
|  |  | ||||||
// TestEncodeMimeMail4 builds an HTML mail with an inline PNG attachment
// (loaded from test_placeholder.png next to this file) and runs it through
// the (currently no-op) verifyMime check.
func TestEncodeMimeMail4(t *testing.T) {

	b := tst.Must(os.ReadFile("test_placeholder.png"))(t)

	mail := encodeMimeMail(
		"noreply@heydyno.de",
		[]string{"trash@mikescher.de"},
		nil,
		nil,
		"Hello Test Mail (inline)",
		MailBody{
			HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
		},
		[]MailAttachment{
			{Data: b, Filename: "img.png", IsInline: true, ContentType: "image/png"},
		})

	verifyMime(mail)
}
|  |  | ||||||
// verifyMime is a placeholder hook for checking a generated MIME message;
// it currently performs no assertions (uncomment the print for a manual
// eyeball inspection).
func verifyMime(mail string) {
	//fmt.Printf("%s\n\n", mail)
}
							
								
								
									
										91
									
								
								googleapi/oAuth.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										91
									
								
								googleapi/oAuth.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,91 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/timeext" | ||||||
|  | 	"io" | ||||||
|  | 	"net/http" | ||||||
|  | 	"sync" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
// GoogleOAuth supplies OAuth2 access tokens for Google API requests.
type GoogleOAuth interface {
	// AccessToken returns a currently-valid access token, refreshing if needed.
	AccessToken() (string, error)
}
|  |  | ||||||
// oauth is the default GoogleOAuth implementation: it trades a long-lived
// refresh token for short-lived access tokens and caches the current one.
type oauth struct {
	clientID     string
	clientSecret string
	refreshToken string

	// lock guards accessToken/expiryDate, which are replaced after a refresh.
	lock        sync.RWMutex
	accessToken *string    // nil until the first successful refresh
	expiryDate  *time.Time // nil until the first successful refresh
}
|  |  | ||||||
// NewGoogleOAuth creates a GoogleOAuth that refreshes access tokens with the
// given OAuth client credentials and refresh token (see googleapi/README.md
// for how to obtain them).
func NewGoogleOAuth(clientid string, clientsecret, refreshtoken string) GoogleOAuth {
	return &oauth{
		clientID:     clientid,
		clientSecret: clientsecret,
		refreshToken: refreshtoken,
	}
}
|  |  | ||||||
|  | func (c *oauth) AccessToken() (string, error) { | ||||||
|  | 	c.lock.RLock() | ||||||
|  | 	if c.accessToken != nil && c.expiryDate != nil && (*c.expiryDate).After(time.Now()) { | ||||||
|  | 		c.lock.RUnlock() | ||||||
|  | 		return *c.accessToken, nil // still valid | ||||||
|  | 	} | ||||||
|  | 	c.lock.RUnlock() | ||||||
|  |  | ||||||
|  | 	httpclient := http.Client{} | ||||||
|  |  | ||||||
|  | 	url := fmt.Sprintf("https://oauth2.googleapis.com/token?client_id=%s&client_secret=%s&grant_type=%s&refresh_token=%s", | ||||||
|  | 		c.clientID, | ||||||
|  | 		c.clientSecret, | ||||||
|  | 		"refresh_token", | ||||||
|  | 		c.refreshToken) | ||||||
|  |  | ||||||
|  | 	req, err := http.NewRequest(http.MethodPost, url, nil) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	reqStartTime := time.Now() | ||||||
|  |  | ||||||
|  | 	res, err := httpclient.Do(req) | ||||||
|  |  | ||||||
|  | 	type response struct { | ||||||
|  | 		AccessToken string `json:"access_token"` | ||||||
|  | 		ExpiresIn   int    `json:"expires_in"` | ||||||
|  | 		Scope       string `json:"scope"` | ||||||
|  | 		TokenType   string `json:"token_type"` | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	var r response | ||||||
|  |  | ||||||
|  | 	data, err := io.ReadAll(res.Body) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = json.Unmarshal(data, &r) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "", err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if r.ExpiresIn == 0 || r.AccessToken == "" { | ||||||
|  | 		return "", exerr.New(exerr.TypeGoogleResponse, "google oauth returned no response").Str("body", string(data)).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	c.lock.Lock() | ||||||
|  | 	c.expiryDate = langext.Ptr(reqStartTime.Add(timeext.FromSeconds(r.ExpiresIn - 10))) | ||||||
|  | 	c.accessToken = langext.Ptr(r.AccessToken) | ||||||
|  | 	c.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	return r.AccessToken, nil | ||||||
|  | } | ||||||
							
								
								
									
										69
									
								
								googleapi/sendMail.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										69
									
								
								googleapi/sendMail.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,69 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	"context" | ||||||
|  | 	"encoding/base64" | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"io" | ||||||
|  | 	"net/http" | ||||||
|  | ) | ||||||
|  |  | ||||||
// MailRef identifies a message created via the GMail API
// (the response of users.messages.send).
type MailRef struct {
	ID       string   `json:"id"`
	ThreadID string   `json:"threadId"`
	LabelIDs []string `json:"labelIds"`
}
|  |  | ||||||
|  | func (c *client) SendMail(ctx context.Context, from string, recipients []string, cc []string, bcc []string, subject string, body MailBody, attachments []MailAttachment) (MailRef, error) { | ||||||
|  |  | ||||||
|  | 	mm := encodeMimeMail(from, recipients, cc, bcc, subject, body, attachments) | ||||||
|  |  | ||||||
|  | 	tok, err := c.oauth.AccessToken() | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MailRef{}, exerr.Wrap(err, "").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	url := fmt.Sprintf("https://gmail.googleapis.com/gmail/v1/users/%s/messages/send?alt=json&prettyPrint=false", "me") | ||||||
|  |  | ||||||
|  | 	msgbody, err := json.Marshal(langext.H{"raw": base64.URLEncoding.EncodeToString([]byte(mm))}) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MailRef{}, exerr.Wrap(err, "").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(msgbody)) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MailRef{}, exerr.Wrap(err, "").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	req.Header.Add("Authorization", "Bearer "+tok) | ||||||
|  | 	req.Header.Add("X-Goog-Api-Client", "blackforestbytes-goext/"+goext.GoextVersion) | ||||||
|  | 	req.Header.Add("User-Agent", "blackforestbytes-goext/"+goext.GoextVersion) | ||||||
|  | 	req.Header.Add("Content-Type", "application/json") | ||||||
|  |  | ||||||
|  | 	resp, err := c.http.Do(req) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MailRef{}, exerr.Wrap(err, "").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	respBody, err := io.ReadAll(resp.Body) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MailRef{}, exerr.Wrap(err, "").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if resp.StatusCode != 200 { | ||||||
|  | 		return MailRef{}, exerr.New(exerr.TypeGoogleStatuscode, "gmail returned non-200 statuscode").Int("sc", resp.StatusCode).Str("body", string(respBody)).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	var respObj MailRef | ||||||
|  | 	err = json.Unmarshal(respBody, &respObj) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return MailRef{}, exerr.Wrap(err, "").Str("body", string(respBody)).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return respObj, nil | ||||||
|  | } | ||||||
							
								
								
									
										151
									
								
								googleapi/sendMail_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										151
									
								
								googleapi/sendMail_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,151 @@ | |||||||
|  | package googleapi | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"os" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
// TestMain initializes the exerr error package (used by the client code
// under test) once before running the package's tests.
func TestMain(m *testing.M) {
	if !exerr.Initialized() {
		exerr.Init(exerr.ErrorPackageConfigInit{ZeroLogErrTraces: langext.PFalse, ZeroLogAllTraces: langext.PFalse})
	}
	os.Exit(m.Run())
}
|  |  | ||||||
// TestSendMail1 sends a real plain-text mail through the GMail API.
// Permanently skipped: it needs live OAuth credentials (the "TODO"
// placeholders below) and has external side effects; the code after the
// skip is kept as a manual-run example.
func TestSendMail1(t *testing.T) {
	t.Skip()
	return

	auth := NewGoogleOAuth(
		"554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com",
		"TODO",
		"TODO")

	ctx := context.Background()

	gclient := NewGoogleClient(auth)

	mail, err := gclient.SendMail(
		ctx,
		"noreply@heydyno.de",
		[]string{"trash@mikescher.de"},
		nil,
		nil,
		"Hello Test Mail",
		MailBody{Plain: "Plain Text"},
		nil)

	tst.AssertNoErr(t, err)

	fmt.Printf("mail.ID        := %s\n", mail.ID)
	fmt.Printf("mail.ThreadID  := %s\n", mail.ThreadID)
	fmt.Printf("mail.LabelIDs  := %v\n", mail.LabelIDs)
}
|  |  | ||||||
// TestSendMail2 sends a real plain+HTML (multipart/alternative) mail through
// the GMail API. Permanently skipped: it needs live OAuth credentials (the
// "TODO" placeholders below) and has external side effects.
func TestSendMail2(t *testing.T) {
	t.Skip()
	return

	auth := NewGoogleOAuth(
		"554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com",
		"TODO",
		"TODO")

	ctx := context.Background()

	gclient := NewGoogleClient(auth)

	mail, err := gclient.SendMail(
		ctx,
		"noreply@heydyno.de",
		[]string{"trash@mikescher.de"},
		nil,
		nil,
		"Hello Test Mail (alternative)",
		MailBody{
			Plain: "Plain Text",
			HTML:  "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>",
		},
		nil)

	tst.AssertNoErr(t, err)

	fmt.Printf("mail.ID        := %s\n", mail.ID)
	fmt.Printf("mail.ThreadID  := %s\n", mail.ThreadID)
	fmt.Printf("mail.LabelIDs  := %v\n", mail.LabelIDs)
}
|  |  | ||||||
|  | func TestSendMail3(t *testing.T) { | ||||||
|  | 	t.Skip() | ||||||
|  | 	return | ||||||
|  |  | ||||||
|  | 	auth := NewGoogleOAuth( | ||||||
|  | 		"554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com", | ||||||
|  | 		"TODO", | ||||||
|  | 		"TODO") | ||||||
|  |  | ||||||
|  | 	ctx := context.Background() | ||||||
|  |  | ||||||
|  | 	gclient := NewGoogleClient(auth) | ||||||
|  |  | ||||||
|  | 	mail, err := gclient.SendMail( | ||||||
|  | 		ctx, | ||||||
|  | 		"noreply@heydyno.de", | ||||||
|  | 		[]string{"trash@mikescher.de"}, | ||||||
|  | 		nil, | ||||||
|  | 		nil, | ||||||
|  | 		"Hello Test Mail (attach)", | ||||||
|  | 		MailBody{ | ||||||
|  | 			HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>", | ||||||
|  | 		}, | ||||||
|  | 		[]MailAttachment{ | ||||||
|  | 			{Data: []byte("HelloWorld"), Filename: "test.txt", IsInline: false, ContentType: "text/plain"}, | ||||||
|  | 		}) | ||||||
|  |  | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Printf("mail.ID        := %s\n", mail.ID) | ||||||
|  | 	fmt.Printf("mail.ThreadID  := %s\n", mail.ThreadID) | ||||||
|  | 	fmt.Printf("mail.LabelIDs  := %v\n", mail.LabelIDs) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestSendMail4(t *testing.T) { | ||||||
|  | 	t.Skip() | ||||||
|  | 	return | ||||||
|  |  | ||||||
|  | 	auth := NewGoogleOAuth( | ||||||
|  | 		"554617284247-8di0j6s5dcmlk4lmk4hdf9kdn8scss54.apps.googleusercontent.com", | ||||||
|  | 		"TODO", | ||||||
|  | 		"TODO") | ||||||
|  |  | ||||||
|  | 	ctx := context.Background() | ||||||
|  |  | ||||||
|  | 	gclient := NewGoogleClient(auth) | ||||||
|  |  | ||||||
|  | 	b := tst.Must(os.ReadFile("test_placeholder.png"))(t) | ||||||
|  |  | ||||||
|  | 	mail, err := gclient.SendMail( | ||||||
|  | 		ctx, | ||||||
|  | 		"noreply@heydyno.de", | ||||||
|  | 		[]string{"trash@mikescher.de"}, | ||||||
|  | 		nil, | ||||||
|  | 		nil, | ||||||
|  | 		"Hello Test Mail (inline)", | ||||||
|  | 		MailBody{ | ||||||
|  | 			HTML: "<html><body><u>Non</u> Pl<i>ai</i>n T<b>ex</b>t</body></html>", | ||||||
|  | 		}, | ||||||
|  | 		[]MailAttachment{ | ||||||
|  | 			{Data: b, Filename: "img.png", IsInline: true, ContentType: "image/png"}, | ||||||
|  | 		}) | ||||||
|  |  | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	fmt.Printf("mail.ID        := %s\n", mail.ID) | ||||||
|  | 	fmt.Printf("mail.ThreadID  := %s\n", mail.ThreadID) | ||||||
|  | 	fmt.Printf("mail.LabelIDs  := %v\n", mail.LabelIDs) | ||||||
|  | } | ||||||
							
								
								
									
										22
									
								
								googleapi/service.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										22
									
								
								googleapi/service.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,22 @@ | |||||||
package googleapi

import (
	"context"
	"net/http"
)

// GoogleClient is the consumer-facing interface of this package's Google API
// wrapper. It currently exposes only mail sending (Gmail API).
type GoogleClient interface {
	// SendMail sends a mail from `from` to the given recipients (with optional
	// cc/bcc lists, which may be nil), with the given subject, body and
	// optional attachments, and returns a reference to the created message.
	SendMail(ctx context.Context, from string, recipients []string, cc []string, bcc []string, subject string, body MailBody, attachments []MailAttachment) (MailRef, error)
}

// client is the default (and only) GoogleClient implementation.
type client struct {
	oauth GoogleOAuth // OAuth helper used to authenticate outgoing requests
	http  http.Client // NOTE(review): no Timeout set — requests can block indefinitely; consider a default timeout
}

// NewGoogleClient creates a GoogleClient that authenticates all requests via
// the supplied OAuth helper.
func NewGoogleClient(oauth GoogleOAuth) GoogleClient {
	return &client{
		oauth: oauth,
		http:  http.Client{},
	}
}
							
								
								
									
										
											BIN
										
									
								
								googleapi/test_placeholder.png
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								googleapi/test_placeholder.png
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 11 KiB | 
							
								
								
									
										3
									
								
								imageext/enums.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								imageext/enums.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | |||||||
// Package imageext provides image helpers: percentage-based cropping,
// PNG/JPEG encoding with selectable compression, and CSS-like object-fit
// scaling into a bounding box.
package imageext

// Regenerate the enum boilerplate (enums_gen.go) after changing any
// //@enum:type declaration in this package.
//go:generate go run ../_gen/enum-generate.go -- enums_gen.go
							
								
								
									
										216
									
								
								imageext/enums_gen.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										216
									
								
								imageext/enums_gen.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,216 @@ | |||||||
// Code generated by enum-generate.go DO NOT EDIT.
//
// NOTE(review): generator output (checksummed via ChecksumEnumGenerator below);
// fix issues in the generator or the //@enum:type declarations, never here.

package imageext

import "gogs.mikescher.com/BlackForestBytes/goext/langext"
import "gogs.mikescher.com/BlackForestBytes/goext/enums"

const ChecksumEnumGenerator = "1da5383c33ee442fd0b899369053f66bdc85bed2dbf906949d3edfeedfe13340" // GoExtVersion: 0.0.449

// ================================ ImageFit ================================
//
// File:       image.go
// StringEnum: true
// DescrEnum:  false
// DataEnum:   false
//

var __ImageFitValues = []ImageFit{
	ImageFitStretch,
	ImageFitCover,
	ImageFitContainCenter,
	ImageFitContainTopLeft,
	ImageFitContainTopRight,
	ImageFitContainBottomLeft,
	ImageFitContainBottomRight,
}

var __ImageFitVarnames = map[ImageFit]string{
	ImageFitStretch:            "ImageFitStretch",
	ImageFitCover:              "ImageFitCover",
	ImageFitContainCenter:      "ImageFitContainCenter",
	ImageFitContainTopLeft:     "ImageFitContainTopLeft",
	ImageFitContainTopRight:    "ImageFitContainTopRight",
	ImageFitContainBottomLeft:  "ImageFitContainBottomLeft",
	ImageFitContainBottomRight: "ImageFitContainBottomRight",
}

func (e ImageFit) Valid() bool {
	return langext.InArray(e, __ImageFitValues)
}

func (e ImageFit) Values() []ImageFit {
	return __ImageFitValues
}

func (e ImageFit) ValuesAny() []any {
	return langext.ArrCastToAny(__ImageFitValues)
}

func (e ImageFit) ValuesMeta() []enums.EnumMetaValue {
	return ImageFitValuesMeta()
}

func (e ImageFit) String() string {
	return string(e)
}

func (e ImageFit) VarName() string {
	if d, ok := __ImageFitVarnames[e]; ok {
		return d
	}
	return ""
}

func (e ImageFit) TypeName() string {
	return "ImageFit"
}

func (e ImageFit) PackageName() string {
	// NOTE(review): returns "media" although this package is "imageext" —
	// looks like a generator/template artifact; confirm against enum-generate.go.
	return "media"
}

func (e ImageFit) Meta() enums.EnumMetaValue {
	return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil}
}

func ParseImageFit(vv string) (ImageFit, bool) {
	for _, ev := range __ImageFitValues {
		if string(ev) == vv {
			return ev, true
		}
	}
	return "", false
}

func ImageFitValues() []ImageFit {
	return __ImageFitValues
}

func ImageFitValuesMeta() []enums.EnumMetaValue {
	return []enums.EnumMetaValue{
		ImageFitStretch.Meta(),
		ImageFitCover.Meta(),
		ImageFitContainCenter.Meta(),
		ImageFitContainTopLeft.Meta(),
		ImageFitContainTopRight.Meta(),
		ImageFitContainBottomLeft.Meta(),
		ImageFitContainBottomRight.Meta(),
	}
}

// ================================ ImageCompresson ================================
//
// File:       image.go
// StringEnum: true
// DescrEnum:  false
// DataEnum:   false
//

var __ImageCompressonValues = []ImageCompresson{
	CompressionPNGNone,
	CompressionPNGSpeed,
	CompressionPNGBest,
	CompressionJPEG100,
	CompressionJPEG90,
	CompressionJPEG80,
	CompressionJPEG70,
	CompressionJPEG60,
	CompressionJPEG50,
	CompressionJPEG25,
	CompressionJPEG10,
	CompressionJPEG1,
}

var __ImageCompressonVarnames = map[ImageCompresson]string{
	CompressionPNGNone:  "CompressionPNGNone",
	CompressionPNGSpeed: "CompressionPNGSpeed",
	CompressionPNGBest:  "CompressionPNGBest",
	CompressionJPEG100:  "CompressionJPEG100",
	CompressionJPEG90:   "CompressionJPEG90",
	CompressionJPEG80:   "CompressionJPEG80",
	CompressionJPEG70:   "CompressionJPEG70",
	CompressionJPEG60:   "CompressionJPEG60",
	CompressionJPEG50:   "CompressionJPEG50",
	CompressionJPEG25:   "CompressionJPEG25",
	CompressionJPEG10:   "CompressionJPEG10",
	CompressionJPEG1:    "CompressionJPEG1",
}

func (e ImageCompresson) Valid() bool {
	return langext.InArray(e, __ImageCompressonValues)
}

func (e ImageCompresson) Values() []ImageCompresson {
	return __ImageCompressonValues
}

func (e ImageCompresson) ValuesAny() []any {
	return langext.ArrCastToAny(__ImageCompressonValues)
}

func (e ImageCompresson) ValuesMeta() []enums.EnumMetaValue {
	return ImageCompressonValuesMeta()
}

func (e ImageCompresson) String() string {
	return string(e)
}

func (e ImageCompresson) VarName() string {
	if d, ok := __ImageCompressonVarnames[e]; ok {
		return d
	}
	return ""
}

func (e ImageCompresson) TypeName() string {
	return "ImageCompresson"
}

func (e ImageCompresson) PackageName() string {
	// NOTE(review): same "media" artifact as ImageFit.PackageName above.
	return "media"
}

func (e ImageCompresson) Meta() enums.EnumMetaValue {
	return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil}
}

func ParseImageCompresson(vv string) (ImageCompresson, bool) {
	for _, ev := range __ImageCompressonValues {
		if string(ev) == vv {
			return ev, true
		}
	}
	return "", false
}

func ImageCompressonValues() []ImageCompresson {
	return __ImageCompressonValues
}

func ImageCompressonValuesMeta() []enums.EnumMetaValue {
	return []enums.EnumMetaValue{
		CompressionPNGNone.Meta(),
		CompressionPNGSpeed.Meta(),
		CompressionPNGBest.Meta(),
		CompressionJPEG100.Meta(),
		CompressionJPEG90.Meta(),
		CompressionJPEG80.Meta(),
		CompressionJPEG70.Meta(),
		CompressionJPEG60.Meta(),
		CompressionJPEG50.Meta(),
		CompressionJPEG25.Meta(),
		CompressionJPEG10.Meta(),
		CompressionJPEG1.Meta(),
	}
}

// ================================ ================= ================================

func AllPackageEnums() []enums.Enum {
	return []enums.Enum{
		ImageFitStretch,    // ImageFit
		CompressionPNGNone, // ImageCompresson
	}
}
							
								
								
									
										321
									
								
								imageext/image.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										321
									
								
								imageext/image.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,321 @@ | |||||||
|  | package imageext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	"fmt" | ||||||
|  | 	"github.com/disintegration/imaging" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/mathext" | ||||||
|  | 	"image" | ||||||
|  | 	"image/color" | ||||||
|  | 	"image/draw" | ||||||
|  | 	"image/jpeg" | ||||||
|  | 	"image/png" | ||||||
|  | 	"io" | ||||||
|  | 	"math" | ||||||
|  | ) | ||||||
|  |  | ||||||
// ImageFit selects how an image is scaled into a target bounding box,
// analogous to the CSS object-fit property (see ObjectFitImage).
type ImageFit string //@enum:type

const (
	ImageFitStretch            ImageFit = "STRETCH"             // distort the image to exactly fill the box
	ImageFitCover              ImageFit = "COVER"               // fill the box completely, cropping overflow
	ImageFitContainCenter      ImageFit = "CONTAIN_CENTER"      // fit inside the box, centered, padded with fill color
	ImageFitContainTopLeft     ImageFit = "CONTAIN_TOPLEFT"     // fit inside the box, anchored top-left
	ImageFitContainTopRight    ImageFit = "CONTAIN_TOPRIGHT"    // fit inside the box, anchored top-right
	ImageFitContainBottomLeft  ImageFit = "CONTAIN_BOTTOMLEFT"  // fit inside the box, anchored bottom-left
	ImageFitContainBottomRight ImageFit = "CONTAIN_BOTTOMRIGHT" // fit inside the box, anchored bottom-right
)

// ImageCrop describes a crop rectangle relative to the image size.
type ImageCrop struct { // all crop values are percentages!

	CropX      float64 `bson:"cropX"      json:"cropX"`      // left edge, 0.0 - 1.0 of image width
	CropY      float64 `bson:"cropY"      json:"cropY"`      // top edge, 0.0 - 1.0 of image height
	CropWidth  float64 `bson:"cropWidth"  json:"cropWidth"`  // width, 0.0 - 1.0 of image width
	CropHeight float64 `bson:"cropHeight" json:"cropHeight"` // height, 0.0 - 1.0 of image height
}

// ImageCompresson selects the output format and compression/quality level
// used by EncodeImage (PNG compression levels or JPEG quality steps).
type ImageCompresson string //@enum:type

const (
	CompressionPNGNone  ImageCompresson = "PNG_NONE"
	CompressionPNGSpeed ImageCompresson = "PNG_SPEED"
	CompressionPNGBest  ImageCompresson = "PNG_BEST"
	CompressionJPEG100  ImageCompresson = "JPEG_100"
	CompressionJPEG90   ImageCompresson = "JPEG_090"
	CompressionJPEG80   ImageCompresson = "JPEG_080"
	CompressionJPEG70   ImageCompresson = "JPEG_070"
	CompressionJPEG60   ImageCompresson = "JPEG_060"
	CompressionJPEG50   ImageCompresson = "JPEG_050"
	CompressionJPEG25   ImageCompresson = "JPEG_025"
	CompressionJPEG10   ImageCompresson = "JPEG_010"
	CompressionJPEG1    ImageCompresson = "JPEG_001"
)
|  |  | ||||||
|  | func CropImage(img image.Image, px float64, py float64, pw float64, ph float64) (image.Image, error) { | ||||||
|  |  | ||||||
|  | 	type subImager interface { | ||||||
|  | 		SubImage(r image.Rectangle) image.Image | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	x := int(float64(img.Bounds().Dx()) * px) | ||||||
|  | 	y := int(float64(img.Bounds().Dy()) * py) | ||||||
|  | 	w := int(float64(img.Bounds().Dx()) * pw) | ||||||
|  | 	h := int(float64(img.Bounds().Dy()) * ph) | ||||||
|  |  | ||||||
|  | 	if simg, ok := img.(subImager); ok { | ||||||
|  |  | ||||||
|  | 		return simg.SubImage(image.Rect(x, y, x+w, y+h)), nil | ||||||
|  |  | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		bfr1 := bytes.Buffer{} | ||||||
|  | 		err := png.Encode(&bfr1, img) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		imgPNG, err := png.Decode(&bfr1) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		return imgPNG.(subImager).SubImage(image.Rect(x, y, w+w, y+h)), nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func EncodeImage(img image.Image, compression ImageCompresson) (bytes.Buffer, string, error) { | ||||||
|  | 	var err error | ||||||
|  |  | ||||||
|  | 	bfr := bytes.Buffer{} | ||||||
|  |  | ||||||
|  | 	switch compression { | ||||||
|  | 	case CompressionPNGNone: | ||||||
|  | 		enc := &png.Encoder{CompressionLevel: png.NoCompression} | ||||||
|  | 		err = enc.Encode(&bfr, img) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/png", nil | ||||||
|  | 	case CompressionPNGSpeed: | ||||||
|  | 		enc := &png.Encoder{CompressionLevel: png.BestSpeed} | ||||||
|  | 		err = enc.Encode(&bfr, img) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/png", nil | ||||||
|  | 	case CompressionPNGBest: | ||||||
|  | 		enc := &png.Encoder{CompressionLevel: png.BestCompression} | ||||||
|  | 		err = enc.Encode(&bfr, img) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/png", nil | ||||||
|  | 	case CompressionJPEG100: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 100}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG90: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 90}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG80: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 80}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG70: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 70}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG60: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 60}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG50: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 50}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG25: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 25}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG10: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 10}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	case CompressionJPEG1: | ||||||
|  | 		err = jpeg.Encode(&bfr, img, &jpeg.Options{Quality: 1}) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return bytes.Buffer{}, "", exerr.Wrap(err, "").Build() | ||||||
|  | 		} | ||||||
|  | 		return bfr, "image/jpeg", nil | ||||||
|  | 	default: | ||||||
|  | 		return bytes.Buffer{}, "", exerr.New(exerr.TypeInternal, "unknown compression method: "+compression.String()).Build() | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
// ObjectFitImage scales img into a target bounding box of (bbw x bbh) pixels
// according to the CSS-like object-fit mode `fit`, filling any empty space
// with fillColor. The result keeps the bounding-box ratio but is never
// larger than the bounding box itself; an unknown fit value yields a
// TypeInternal error.
func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fillColor color.Color) (image.Image, error) {

	iw := img.Bounds().Size().X
	ih := img.Bounds().Size().Y

	// [iw, ih]   is the size of the image
	// [bbw, bbh] is the target bounding box,
	//             - it specifies the target ratio
	//             - and the maximal target resolution

	facW := float64(iw) / bbw
	facH := float64(ih) / bbh

	// facW is the ratio between iw and bbw
	//  - it is the factor by which the bounding box must be multiplied to reach the image size (in the x-axis)
	//
	// (same is true for facH, but for the height and y-axis)

	if fit == ImageFitCover {

		// image-fit:cover completely fills the target-bounding-box, it potentially cuts parts of the image away

		// we use the smaller (!) value of facW and facH, because we want to have the smallest possible destination rect (due to file size)
		// and because the image is made to completely fill the bounding-box, the smaller factor (= the dimension the image is stretched more) is relevant

		// but we cap `fac` at 1 (can be larger than 1)
		// a value >1 would mean the final image resolution is bigger than the bounding box, which we do not want.

		// if the initial image (iw, ih) is already bigger than the bounding box (bbw, bbh), facW and facH are always >1 and fac will be 1
		// which means we will simply use the bounding box as destination rect (and scale the image down)

		fac := mathext.Clamp(mathext.Min(facW, facH), 0.0, 1.0)

		// we scale the bounding box by fac (both dimension the same amount, to keep the bounding-box ratio)

		w := int(math.Round(bbw * fac))
		h := int(math.Round(bbh * fac))

		img = imaging.Fill(img, w, h, imaging.Center, imaging.Lanczos)

		newImg := image.NewRGBA(image.Rect(0, 0, w, h))

		draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
		draw.Draw(newImg, newImg.Bounds(), img, image.Pt(0, 0), draw.Over)

		return newImg, nil
	}

	if fit == ImageFitContainCenter || fit == ImageFitContainTopLeft || fit == ImageFitContainTopRight || fit == ImageFitContainBottomLeft || fit == ImageFitContainBottomRight {

		// image-fit:contain fits the whole image into the target-bounding-box; nothing is cut away,
		// but there is potentially empty space which is filled with fillColor

		// we use the bigger (!) value of facW and facH,
		// because the image is made to fit the bounding-box, the bigger factor (= the dimension the image is stretched less) is relevant

		// but we cap `fac` at 1 (can be larger than 1)
		// a value >1 would mean the final image resolution is bigger than the bounding box, which we do not want.

		// if the initial image (iw, ih) is already bigger than the bounding box (bbw, bbh), facW and facH are always >1 and fac will be 1
		// which means we will simply use the bounding box as destination rect (and scale the image down)

		facOut := mathext.Clamp(mathext.Max(facW, facH), 0.0, 1.0)

		// we scale the bounding box by fac (both dimension the same amount, to keep the bounding-box ratio)

		// [ow|oh] ==> size of output image (same ratio as bounding box [bbw|bbh])

		ow := int(math.Round(bbw * facOut))
		oh := int(math.Round(bbh * facOut))

		facScale := mathext.Min(float64(ow)/float64(iw), float64(oh)/float64(ih))

		// [dw|dh] ==> size of destination rect (where to draw source in output image) (same ratio as input image [iw|ih])

		dw := int(math.Round(float64(iw) * facScale))
		dh := int(math.Round(float64(ih) * facScale))

		img = imaging.Resize(img, dw, dh, imaging.Lanczos)

		// anchor the scaled image inside the output canvas according to the requested contain variant
		var destBounds image.Rectangle
		if fit == ImageFitContainCenter {
			destBounds = image.Rect((ow-dw)/2, (oh-dh)/2, (ow-dw)/2+dw, (oh-dh)/2+dh)
		} else if fit == ImageFitContainTopLeft {
			destBounds = image.Rect(0, 0, dw, dh)
		} else if fit == ImageFitContainTopRight {
			destBounds = image.Rect(ow-dw, 0, ow, dh)
		} else if fit == ImageFitContainBottomLeft {
			destBounds = image.Rect(0, oh-dh, dw, oh)
		} else if fit == ImageFitContainBottomRight {
			destBounds = image.Rect(ow-dw, oh-dh, ow, oh)
		}

		newImg := image.NewRGBA(image.Rect(0, 0, ow, oh))

		draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
		draw.Draw(newImg, destBounds, img, image.Pt(0, 0), draw.Over)

		return newImg, nil
	}

	if fit == ImageFitStretch {

		// image-fit:stretch simply stretches the image to the bounding box

		// we use the bigger value of [facW;facH], to (potentially) scale the bounding box down before applying it
		// theoretically we could directly use [bbw, bbh] in the call to imaging.Resize,
		// but if the image is (a lot) smaller than the bounding box it is useful to scale it down to reduce final pdf filesize

		// we also cap fac at 1, because we never want the final rect to be bigger than the inputted bounding box (see comments at start of method)

		fac := mathext.Clamp(mathext.Max(facW, facH), 0.0, 1.0)

		// we scale the bounding box by fac (both dimension the same amount, to keep the bounding-box ratio)

		w := int(math.Round(bbw * fac))
		h := int(math.Round(bbh * fac))

		img = imaging.Resize(img, w, h, imaging.Lanczos)

		newImg := image.NewRGBA(image.Rect(0, 0, w, h))

		draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src)
		draw.Draw(newImg, newImg.Bounds(), img, image.Pt(0, 0), draw.Over)

		return newImg, nil
	}

	return nil, exerr.New(exerr.TypeInternal, fmt.Sprintf("unknown image-fit: '%s'", fit)).Build()
}
|  |  | ||||||
|  | func VerifyAndDecodeImage(data io.Reader, mime string) (image.Image, error) { | ||||||
|  |  | ||||||
|  | 	if mime == "image/jpeg" { | ||||||
|  | 		img, err := jpeg.Decode(data) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "failed to decode blob as jpeg").WithType(exerr.TypeInvalidImage).Build() | ||||||
|  | 		} | ||||||
|  | 		return img, nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if mime == "image/png" { | ||||||
|  | 		img, err := png.Decode(data) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "failed to decode blob as png").WithType(exerr.TypeInvalidImage).Build() | ||||||
|  | 		} | ||||||
|  | 		return img, nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil, exerr.New(exerr.TypeInvalidMimeType, fmt.Sprintf("unknown/invalid image mimetype: '%s'", mime)).Build() | ||||||
|  | } | ||||||
| @@ -59,6 +59,18 @@ func ArrUnique[T comparable](array []T) []T { | |||||||
| 	return result | 	return result | ||||||
| } | } | ||||||
|  |  | ||||||
// ArrUniqueStable returns a copy of array with all duplicates removed while
// preserving the original order: the first occurrence of each value keeps
// its position, later occurrences are dropped.
func ArrUniqueStable[T comparable](array []T) []T {
	seen := make(map[T]bool, len(array))
	out := make([]T, 0, len(array))
	for _, item := range array {
		if seen[item] {
			continue
		}
		seen[item] = true
		out = append(out, item)
	}
	return out
}
|  |  | ||||||
| func ArrEqualsExact[T comparable](arr1 []T, arr2 []T) bool { | func ArrEqualsExact[T comparable](arr1 []T, arr2 []T) bool { | ||||||
| 	if len(arr1) != len(arr2) { | 	if len(arr1) != len(arr2) { | ||||||
| 		return false | 		return false | ||||||
| @@ -265,6 +277,15 @@ func ArrFirstIndex[T comparable](arr []T, needle T) int { | |||||||
| 	return -1 | 	return -1 | ||||||
| } | } | ||||||
|  |  | ||||||
// ArrFirstIndexFunc returns the index of the first element for which comp
// reports true, or -1 if no element matches.
func ArrFirstIndexFunc[T any](arr []T, comp func(v T) bool) int {
	for idx := range arr {
		if comp(arr[idx]) {
			return idx
		}
	}
	return -1
}
|  |  | ||||||
| func ArrLastIndex[T comparable](arr []T, needle T) int { | func ArrLastIndex[T comparable](arr []T, needle T) int { | ||||||
| 	result := -1 | 	result := -1 | ||||||
| 	for i, v := range arr { | 	for i, v := range arr { | ||||||
| @@ -275,6 +296,16 @@ func ArrLastIndex[T comparable](arr []T, needle T) int { | |||||||
| 	return result | 	return result | ||||||
| } | } | ||||||
|  |  | ||||||
// ArrLastIndexFunc returns the index of the last element for which comp
// reports true, or -1 if no element matches.
// It scans from the end so it can return at the first match found (which is
// the last match overall) instead of always traversing the whole slice.
func ArrLastIndexFunc[T any](arr []T, comp func(v T) bool) int {
	for i := len(arr) - 1; i >= 0; i-- {
		if comp(arr[i]) {
			return i
		}
	}
	return -1
}
|  |  | ||||||
| func AddToSet[T comparable](set []T, add T) []T { | func AddToSet[T comparable](set []T, add T) []T { | ||||||
| 	for _, v := range set { | 	for _, v := range set { | ||||||
| 		if v == add { | 		if v == add { | ||||||
| @@ -400,7 +431,7 @@ func ArrCastErr[T1 any, T2 any](arr []T1) ([]T2, error) { | |||||||
| 		if vcast, ok := any(v).(T2); ok { | 		if vcast, ok := any(v).(T2); ok { | ||||||
| 			r[i] = vcast | 			r[i] = vcast | ||||||
| 		} else { | 		} else { | ||||||
| 			return nil, errors.New(fmt.Sprintf("Cannot cast element %d of type %T to type %s", i, v, *new(T2))) | 			return nil, errors.New(fmt.Sprintf("Cannot cast element %d of type %T to type %v", i, v, *new(T2))) | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| 	return r, nil | 	return r, nil | ||||||
| @@ -412,7 +443,7 @@ func ArrCastPanic[T1 any, T2 any](arr []T1) []T2 { | |||||||
| 		if vcast, ok := any(v).(T2); ok { | 		if vcast, ok := any(v).(T2); ok { | ||||||
| 			r[i] = vcast | 			r[i] = vcast | ||||||
| 		} else { | 		} else { | ||||||
| 			panic(fmt.Sprintf("Cannot cast element %d of type %T to type %s", i, v, *new(T2))) | 			panic(fmt.Sprintf("Cannot cast element %d of type %T to type %v", i, v, *new(T2))) | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| 	return r | 	return r | ||||||
| @@ -434,6 +465,26 @@ func ArrConcat[T any](arr ...[]T) []T { | |||||||
| 	return r | 	return r | ||||||
| } | } | ||||||
|  |  | ||||||
// ArrAppend works similar to append(x, y, z) - but does not touch the old
// array and always returns a freshly allocated one.
func ArrAppend[T any](arr []T, add ...T) []T {
	// allocate the exact final size once; the previous ArrCopy-based version
	// produced cap==len, so the following appends always reallocated again
	r := make([]T, 0, len(arr)+len(add))
	r = append(r, arr...)
	return append(r, add...)
}
|  |  | ||||||
// ArrPrepend works similar to append(x, y, z) - but does not touch the old
// array and creates a new one.
// Also - in contrast to ArrAppend - the add values are inserted at the start
// of the resulting array (in reverse order).
func ArrPrepend[T any](arr []T, add ...T) []T {
	out := make([]T, len(arr)+len(add))
	// original elements keep their order, shifted right by len(add)
	copy(out[len(add):], arr)
	// add values go to the front, reversed: add[0] ends up directly before arr
	for i, v := range add {
		out[len(add)-1-i] = v
	}
	return out
}
|  |  | ||||||
| // ArrCopy does a shallow copy of the 'in' array | // ArrCopy does a shallow copy of the 'in' array | ||||||
| func ArrCopy[T any](in []T) []T { | func ArrCopy[T any](in []T) []T { | ||||||
| 	out := make([]T, len(in)) | 	out := make([]T, len(in)) | ||||||
| @@ -449,6 +500,10 @@ func ArrRemove[T comparable](arr []T, needle T) []T { | |||||||
| 	return arr | 	return arr | ||||||
| } | } | ||||||
|  |  | ||||||
// ArrRemoveAt removes the element at index idx and returns the shortened slice.
//
// NOTE(review): this shifts elements inside the ORIGINAL backing array, so the
// caller's input slice is modified; idx is not bounds-checked — an
// out-of-range index panics.
func ArrRemoveAt[T any](arr []T, idx int) []T {
	return append(arr[:idx], arr[idx+1:]...)
}
|  |  | ||||||
| func ArrExcept[T comparable](arr []T, needles ...T) []T { | func ArrExcept[T comparable](arr []T, needles ...T) []T { | ||||||
| 	r := make([]T, 0, len(arr)) | 	r := make([]T, 0, len(arr)) | ||||||
| 	rmlist := ArrToSet(needles) | 	rmlist := ArrToSet(needles) | ||||||
| @@ -479,3 +534,33 @@ func JoinString(arr []string, delimiter string) string { | |||||||
|  |  | ||||||
| 	return str | 	return str | ||||||
| } | } | ||||||
|  |  | ||||||
// ArrChunk partitions arr into consecutive sub-slices of at most `chunkSize`
// elements, keeping the element order; the final chunk may be shorter.
//
// (chunkSize == -1) means no chunking (one chunk containing everything)
//
// see https://www.php.net/manual/en/function.array-chunk.php
func ArrChunk[T any](arr []T, chunkSize int) [][]T {
	if chunkSize == -1 {
		return [][]T{arr}
	}

	res := make([][]T, 0, 1+len(arr)/chunkSize)

	for left := 0; left < len(arr); {
		right := left + chunkSize
		if right > len(arr) {
			right = len(arr)
		}

		res = append(res, arr[left:right])
		left = right
	}

	return res
}
|   | |||||||
| @@ -2,6 +2,7 @@ package langext | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"strings" | ||||||
| 	"testing" | 	"testing" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -10,3 +11,13 @@ func TestJoinString(t *testing.T) { | |||||||
| 	res := JoinString(ids, ",") | 	res := JoinString(ids, ",") | ||||||
| 	tst.AssertEqual(t, res, "1,2,3") | 	tst.AssertEqual(t, res, "1,2,3") | ||||||
| } | } | ||||||
|  |  | ||||||
// TestArrPrepend verifies that ArrPrepend inserts the extra values at the
// front in reverse order and leaves the input slice untouched.
func TestArrPrepend(t *testing.T) {
	v1 := []string{"1", "2", "3"}

	v2 := ArrPrepend(v1, "4", "5", "6")

	tst.AssertEqual(t, strings.Join(v1, ""), "123")
	tst.AssertEqual(t, strings.Join(v2, ""), "654123")

}
|   | |||||||
							
								
								
									
										178
									
								
								langext/baseAny.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										178
									
								
								langext/baseAny.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,178 @@ | |||||||
|  | package langext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"crypto/rand" | ||||||
|  | 	"errors" | ||||||
|  | 	"math" | ||||||
|  | 	"math/big" | ||||||
|  | ) | ||||||
|  |  | ||||||
// AnyBaseConverter converts unsigned integers (and, via Encode/Decode,
// arbitrary byte slices) to/from a positional notation whose digit alphabet
// is a caller-supplied rune set. The numeric base is the rune-count of the
// charset.
type AnyBaseConverter struct {
	base    uint64 // numeric base, always == len(charset)
	charset []rune // digit alphabet; rune at index i encodes digit value i
}

// NewAnyBaseConverter creates a converter over the digit alphabet cs.
// Each rune of cs is one digit; its index is its numeric value.
func NewAnyBaseConverter(cs string) AnyBaseConverter {
	rcs := []rune(cs)
	return AnyBaseConverter{
		base:    uint64(len(rcs)),
		charset: rcs,
	}
}

// Rand returns a string of rlen random digits drawn from the charset, using
// crypto/rand as entropy source. It panics if the entropy source fails.
//
// NOTE(review): sampling uniformly from [0, MaxInt64) and reducing mod base
// carries a modulo bias of ~base/2^63 — negligible for realistic charsets,
// but rand.Int with biBase as the bound would be exact.
func (bc AnyBaseConverter) Rand(rlen int) string {
	biBase := big.NewInt(int64(bc.base))

	randMax := big.NewInt(math.MaxInt64)

	r := ""

	for i := 0; i < rlen; i++ {
		v, err := rand.Int(rand.Reader, randMax)
		if err != nil {
			panic(err)
		}

		r += string(bc.charset[v.Mod(v, biBase).Int64()])
	}

	return r
}

// EncodeUInt64 renders num in the converter's base, most-significant digit
// first (consistent with DecodeUInt64 and EncodeBigInt).
func (bc AnyBaseConverter) EncodeUInt64(num uint64) string {
	if num == 0 {
		// zero is the charset's zero-digit, not the literal rune '0'
		return string(bc.charset[0])
	}

	b := ""

	for num > 0 {
		r := num % bc.base
		num /= bc.base // was `base62Base` — wrong for any charset whose length != 62

		// prepend: remainders are produced least-significant first
		b = string(bc.charset[r]) + b
	}

	return b
}

// DecodeUInt64 parses str (most-significant digit first) in the converter's
// base. It fails on an empty string or on any rune outside the charset.
// Values that do not fit into 64 bit silently overflow.
func (bc AnyBaseConverter) DecodeUInt64(str string) (uint64, error) {
	if str == "" {
		return 0, errors.New("empty string")
	}

	result := uint64(0)

	for _, v := range str {
		result *= bc.base // was `base62Base` — wrong for any charset whose length != 62

		pos := -1
		for i, c := range bc.charset {
			if c == v {
				pos = i
				break
			}
		}
		if pos == -1 {
			return 0, errors.New("invalid character: " + string(v))
		}

		result += uint64(pos)
	}

	return result, nil
}
|  |  | ||||||
|  | func (bc AnyBaseConverter) Encode(src []byte) string { | ||||||
|  | 	value := new(big.Int) | ||||||
|  | 	value.SetBytes(src) | ||||||
|  | 	return bc.EncodeBigInt(value) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (bc AnyBaseConverter) EncodeBigInt(src *big.Int) string { | ||||||
|  | 	value := new(big.Int) | ||||||
|  | 	value.Set(src) | ||||||
|  |  | ||||||
|  | 	isneg := value.Sign() < 0 | ||||||
|  |  | ||||||
|  | 	answer := "" | ||||||
|  |  | ||||||
|  | 	if isneg { | ||||||
|  | 		value.Neg(value) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	biBase := big.NewInt(int64(bc.base)) | ||||||
|  |  | ||||||
|  | 	rem := new(big.Int) | ||||||
|  |  | ||||||
|  | 	for value.Sign() > 0 { | ||||||
|  | 		value.QuoRem(value, biBase, rem) | ||||||
|  | 		answer = string(bc.charset[rem.Int64()]) + answer | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if isneg { | ||||||
|  | 		return "-" + answer | ||||||
|  | 	} else { | ||||||
|  | 		return answer | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (bc AnyBaseConverter) Decode(src string) ([]byte, error) { | ||||||
|  | 	value, err := bc.DecodeToBigInt(src) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, err | ||||||
|  | 	} | ||||||
|  | 	return value.Bytes(), nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (bc AnyBaseConverter) DecodeToBigInt(_src string) (*big.Int, error) { | ||||||
|  | 	result := new(big.Int) | ||||||
|  | 	result.SetInt64(0) | ||||||
|  |  | ||||||
|  | 	src := []rune(_src) | ||||||
|  |  | ||||||
|  | 	if len(src) == 0 { | ||||||
|  | 		return nil, errors.New("string is empty") | ||||||
|  | 	} | ||||||
|  | 	if bc.base < 2 { | ||||||
|  | 		return nil, errors.New("not enough digits") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	i := 0 | ||||||
|  |  | ||||||
|  | 	sign := new(big.Int) | ||||||
|  | 	sign.SetInt64(1) | ||||||
|  | 	if src[i] == '+' { | ||||||
|  | 		i++ | ||||||
|  | 	} else if src[i] == '-' { | ||||||
|  | 		i++ | ||||||
|  | 		sign.SetInt64(-1) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if i >= len(src) { | ||||||
|  | 		return nil, errors.New("no digits in input") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	biBase := big.NewInt(int64(bc.base)) | ||||||
|  |  | ||||||
|  | 	oldResult := new(big.Int) | ||||||
|  |  | ||||||
|  | 	for ; i < len(src); i++ { | ||||||
|  | 		n := ArrFirstIndex(bc.charset, src[i]) | ||||||
|  | 		if n < 0 { | ||||||
|  | 			return nil, errors.New("invalid characters in input") | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		oldResult.Set(result) | ||||||
|  |  | ||||||
|  | 		result.Mul(result, biBase) | ||||||
|  | 		result.Add(result, big.NewInt(int64(n))) | ||||||
|  |  | ||||||
|  | 		if result.Cmp(oldResult) < 0 { | ||||||
|  | 			return nil, errors.New("overflow") | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if sign.Cmp(big.NewInt(0)) < 0 { | ||||||
|  | 		result.Neg(result) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return result, nil | ||||||
|  | } | ||||||
							
								
								
									
										80
									
								
								langext/baseAny_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										80
									
								
								langext/baseAny_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,80 @@ | |||||||
|  | package langext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
// _anyEncStr is a test helper: encodes the raw bytes of v with bc and returns
// the encoded text.
func _anyEncStr(bc AnyBaseConverter, v string) string {
	vr := bc.Encode([]byte(v))
	return vr
}
|  |  | ||||||
// _anyDecStr is a test helper: decodes v with bc back to raw bytes and
// returns them as a string; it panics on decode errors.
func _anyDecStr(bc AnyBaseConverter, v string) string {
	vr, err := bc.Decode(v)
	if err != nil {
		panic(err)
	}
	return string(vr)
}
|  |  | ||||||
// TestAnyBase58DefaultEncoding checks Encode against reference strings for
// the Base58 (Bitcoin) alphabet.
func TestAnyBase58DefaultEncoding(t *testing.T) {
	tst.AssertEqual(t, _anyEncStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "Hello"), "9Ajdvzr")
	tst.AssertEqual(t, _anyEncStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in."), "48638SMcJuah5okqPx4kCVf5d8QAdgbdNf28g7ReY13prUENNbMyssjq5GjsrJHF5zeZfqs4uJMUJHr7VbrU4XBUZ2Fw9DVtqtn9N1eXucEWSEZahXV6w4ysGSWqGdpeYTJf1MdDzTg8vfcQViifJjZX")
}
|  |  | ||||||
// TestAnyBase58DefaultDecoding checks Decode against the same Base58
// reference strings used in TestAnyBase58DefaultEncoding (round-trip).
func TestAnyBase58DefaultDecoding(t *testing.T) {
	tst.AssertEqual(t, _anyDecStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "9Ajdvzr"), "Hello")
	tst.AssertEqual(t, _anyDecStr(NewAnyBaseConverter("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), "48638SMcJuah5okqPx4kCVf5d8QAdgbdNf28g7ReY13prUENNbMyssjq5GjsrJHF5zeZfqs4uJMUJHr7VbrU4XBUZ2Fw9DVtqtn9N1eXucEWSEZahXV6w4ysGSWqGdpeYTJf1MdDzTg8vfcQViifJjZX"), "If debugging is the process of removing software bugs, then programming must be the process of putting them in.")
}
|  |  | ||||||
// TestAnyBaseDecode cross-checks pairs of charsets: the same numeric value,
// written in two different bases, must decode to identical bytes, and
// re-encoding each decoded value must reproduce the original text.
func TestAnyBaseDecode(t *testing.T) {

	// digit alphabets under test; the base of each is its rune-count
	const (
		Binary  = "01"
		Decimal = "0123456789"
		Hex     = "0123456789ABCDEF"
		DNA     = "ACGT"
		Base32  = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567"
		Base58  = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
		Base62  = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
		Base64  = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
		Base256 = "🚀🪐☄🛰🌌🌑🌒🌓🌔🌕🌖🌗🌘🌍🌏🌎🐉☀💻🖥💾💿😂❤😍🤣😊🙏💕😭😘👍😅👏😁🔥🥰💔💖💙😢🤔😆🙄💪😉☺👌🤗💜😔😎😇🌹🤦🎉💞✌✨🤷😱😌🌸🙌😋💗💚😏💛🙂💓🤩😄😀🖤😃💯🙈👇🎶😒🤭❣😜💋👀😪😑💥🙋😞😩😡🤪👊🥳😥🤤👉💃😳✋😚😝😴🌟😬🙃🍀🌷😻😓⭐✅🥺🌈😈🤘💦✔😣🏃💐☹🎊💘😠☝😕🌺🎂🌻😐🖕💝🙊😹🗣💫💀👑🎵🤞😛🔴😤🌼😫⚽🤙☕🏆🤫👈😮🙆🍻🍃🐶💁😲🌿🧡🎁⚡🌞🎈❌✊👋😰🤨😶🤝🚶💰🍓💢🤟🙁🚨💨🤬✈🎀🍺🤓😙💟🌱😖👶🥴▶➡❓💎💸⬇😨🌚🦋😷🕺⚠🙅😟😵👎🤲🤠🤧📌🔵💅🧐🐾🍒😗🤑🌊🤯🐷☎💧😯💆👆🎤🙇🍑❄🌴💣🐸💌📍🥀🤢👅💡💩👐📸👻🤐🤮🎼🥵🚩🍎🍊👼💍📣🥂"
	)

	// each entry states one value rendered in two charsets; both renderings
	// must be equal after decoding
	type TestDef struct {
		FromCS  string
		FromVal string
		ToCS    string
		ToVal   string
	}

	defs := []TestDef{
		{Binary, "10100101011100000101010", Decimal, "5421098"},
		{Decimal, "5421098", DNA, "CCAGGTGAAGGG"},
		{Decimal, "5421098", DNA, "CCAGGTGAAGGG"},
		{Decimal, "80085", Base256, "🪐💞🔵"},
		{Hex, "48656C6C6C20576F526C5421", Base64, "SGVsbGwgV29SbFQh"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base32, "CIMVWGY3B7QFO32SNRPZBB"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base58, "2fUsGKQUcgQcwSqpvy6"},
		{Base64, "SGVsbGw/gV29SbF+Qh", Base62, "V34nvybdQ3m3RHk9Sr"},
	}

	for _, def := range defs {

		d1 := NewAnyBaseConverter(def.FromCS)
		d2 := NewAnyBaseConverter(def.ToCS)

		// decode both renderings ...
		v1 := tst.Must(d1.Decode(def.FromVal))(t)
		v2 := tst.Must(d2.Decode(def.ToVal))(t)

		// ... they must be the same bytes ...
		tst.AssertArrayEqual(t, v1, v2)

		// ... and re-encoding must round-trip to the original strings
		str2 := d2.Encode(v1)
		tst.AssertEqual(t, str2, def.ToVal)

		str1 := d1.Encode(v2)
		tst.AssertEqual(t, str1, def.FromVal)

	}
}
| @@ -5,12 +5,84 @@ import ( | |||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
// Coalesce returns *v1 when v1 is non-nil, otherwise the fallback def.
func Coalesce[T any](v1 *T, def T) T {
	if v1 == nil {
		return def
	}
	return *v1
}
|  |  | ||||||
// CoalesceOpt returns the first non-nil pointer of (v1, v2); the result is
// v2 (possibly nil) when v1 is nil.
func CoalesceOpt[T any](v1 *T, v2 *T) *T {
	if v1 == nil {
		return v2
	}
	return v1
}
|  |  | ||||||
// Coalesce3 dereferences the first non-nil of (v1, v2); when both are nil
// the fallback def is returned.
func Coalesce3[T any](v1 *T, v2 *T, def T) T {
	for _, p := range []*T{v1, v2} {
		if p != nil {
			return *p
		}
	}
	return def
}
|  |  | ||||||
// Coalesce3Opt returns the first non-nil pointer of (v1, v2, v3); the result
// is v3 (possibly nil) when v1 and v2 are nil.
func Coalesce3Opt[T any](v1 *T, v2 *T, v3 *T) *T {
	for _, p := range []*T{v1, v2} {
		if p != nil {
			return p
		}
	}
	return v3
}
|  |  | ||||||
// Coalesce4 dereferences the first non-nil of (v1, v2, v3); when all three
// are nil the fallback def is returned.
func Coalesce4[T any](v1 *T, v2 *T, v3 *T, def T) T {
	for _, p := range []*T{v1, v2, v3} {
		if p != nil {
			return *p
		}
	}
	return def
}
|  |  | ||||||
// Coalesce4Opt returns the first non-nil pointer of (v1, v2, v3, v4); the
// result is v4 (possibly nil) when all earlier ones are nil.
func Coalesce4Opt[T any](v1 *T, v2 *T, v3 *T, v4 *T) *T {
	for _, p := range []*T{v1, v2, v3} {
		if p != nil {
			return p
		}
	}
	return v4
}
|  |  | ||||||
// CoalesceDblPtr collapses one pointer level: when the outer pointer v1 is
// non-nil its inner pointer is returned — note that this inner pointer may
// itself be nil — otherwise the fallback v2 is returned.
func CoalesceDblPtr[T any](v1 **T, v2 *T) *T {
	if v1 != nil {
		return *v1
	}

	return v2
}
|  |  | ||||||
| func CoalesceString(s *string, def string) string { | func CoalesceString(s *string, def string) string { | ||||||
|   | |||||||
| @@ -63,3 +63,51 @@ func PatchRemJson[JV string | []byte](rawjson JV, key string) (JV, error) { | |||||||
|  |  | ||||||
| 	return JV(newjson), nil | 	return JV(newjson), nil | ||||||
| } | } | ||||||
|  |  | ||||||
// MarshalJsonOrPanic serializes v to a JSON string and panics when the
// serialization fails.
func MarshalJsonOrPanic(v any) string {
	if bin, err := json.Marshal(v); err == nil {
		return string(bin)
	} else {
		panic(err)
	}
}
|  |  | ||||||
// MarshalJsonOrDefault serializes v to a JSON string; when the serialization
// fails the fallback def is returned instead.
func MarshalJsonOrDefault(v any, def string) string {
	if bin, err := json.Marshal(v); err == nil {
		return string(bin)
	}
	return def
}
|  |  | ||||||
// MarshalJsonOrNil serializes v to a JSON string; when the serialization
// fails nil is returned instead.
func MarshalJsonOrNil(v any) *string {
	bin, err := json.Marshal(v)
	if err != nil {
		return nil
	}
	str := string(bin)
	return &str
}
|  |  | ||||||
// MarshalJsonIndentOrPanic serializes v to pretty-printed JSON (see
// json.MarshalIndent) and panics when the serialization fails.
func MarshalJsonIndentOrPanic(v any, prefix, indent string) string {
	if bin, err := json.MarshalIndent(v, prefix, indent); err == nil {
		return string(bin)
	} else {
		panic(err)
	}
}
|  |  | ||||||
// MarshalJsonIndentOrDefault serializes v to pretty-printed JSON (see
// json.MarshalIndent); when the serialization fails the fallback def is
// returned instead.
func MarshalJsonIndentOrDefault(v any, prefix, indent string, def string) string {
	if bin, err := json.MarshalIndent(v, prefix, indent); err == nil {
		return string(bin)
	}
	return def
}
|  |  | ||||||
// MarshalJsonIndentOrNil serializes v to pretty-printed JSON (see
// json.MarshalIndent); when the serialization fails nil is returned instead.
func MarshalJsonIndentOrNil(v any, prefix, indent string) *string {
	bin, err := json.MarshalIndent(v, prefix, indent)
	if err != nil {
		return nil
	}
	str := string(bin)
	return &str
}
|   | |||||||
| @@ -29,6 +29,14 @@ func ArrToMap[T comparable, V any](a []V, keyfunc func(V) T) map[T]V { | |||||||
| 	return result | 	return result | ||||||
| } | } | ||||||
|  |  | ||||||
// ArrToKVMap builds a map from a, deriving each entry's key with keyfunc and
// its value with valfunc. On duplicate keys the later element wins.
func ArrToKVMap[T any, K comparable, V any](a []T, keyfunc func(T) K, valfunc func(T) V) map[K]V {
	result := make(map[K]V, len(a))
	for _, item := range a {
		result[keyfunc(item)] = valfunc(item)
	}
	return result
}
|  |  | ||||||
| func ArrToSet[T comparable](a []T) map[T]bool { | func ArrToSet[T comparable](a []T) map[T]bool { | ||||||
| 	result := make(map[T]bool, len(a)) | 	result := make(map[T]bool, len(a)) | ||||||
| 	for _, v := range a { | 	for _, v := range a { | ||||||
| @@ -63,3 +71,19 @@ func ForceMap[K comparable, V any](v map[K]V) map[K]V { | |||||||
| 		return v | 		return v | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
// MapMerge combines base and all maps in arr into a freshly allocated map;
// on duplicate keys the entry from the last map wins. None of the inputs is
// modified.
func MapMerge[K comparable, V any](base map[K]V, arr ...map[K]V) map[K]V {
	res := make(map[K]V, len(base)*(1+len(arr)))

	for _, src := range append([]map[K]V{base}, arr...) {
		for k, v := range src {
			res[k] = v
		}
	}

	return res
}
|   | |||||||
							
								
								
									
										21
									
								
								langext/must.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										21
									
								
								langext/must.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,21 @@ | |||||||
|  | package langext | ||||||
|  |  | ||||||
// Must unwraps a (value, error) pair, panicking when err is non-nil.
//
// Usage: Must(methodWithError(...))
func Must[T any](v T, err error) T {
	if err == nil {
		return v
	}
	panic(err)
}
|  |  | ||||||
// MustBool unwraps a (value, ok) pair, panicking when ok is false.
//
// Usage: MustBool(methodWithOkayReturn(...))
func MustBool[T any](v T, ok bool) T {
	if ok {
		return v
	}
	panic("not ok")
}
							
								
								
									
										19
									
								
								langext/object.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										19
									
								
								langext/object.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,19 @@ | |||||||
|  | package langext | ||||||
|  |  | ||||||
|  | import "encoding/json" | ||||||
|  |  | ||||||
// DeepCopyByJson clones v by marshalling it to JSON and unmarshalling into a
// fresh T. Only JSON-visible state survives (exported fields, honoring json
// tags); on any (un)marshal error the zero value of T is returned together
// with the error.
func DeepCopyByJson[T any](v T) (T, error) {
	var result T

	bin, err := json.Marshal(v)
	if err != nil {
		return result, err
	}

	if err := json.Unmarshal(bin, &result); err != nil {
		var zero T
		return zero, err
	}

	return result, nil
}
| @@ -1,7 +1,10 @@ | |||||||
| package langext | package langext | ||||||
|  |  | ||||||
|  | import "runtime/debug" | ||||||
|  |  | ||||||
| type PanicWrappedErr struct { | type PanicWrappedErr struct { | ||||||
| 	panic any | 	panic any | ||||||
|  | 	Stack string | ||||||
| } | } | ||||||
|  |  | ||||||
| func (p PanicWrappedErr) Error() string { | func (p PanicWrappedErr) Error() string { | ||||||
| @@ -15,7 +18,7 @@ func (p PanicWrappedErr) ReoveredObj() any { | |||||||
| func RunPanicSafe(fn func()) (err error) { | func RunPanicSafe(fn func()) (err error) { | ||||||
| 	defer func() { | 	defer func() { | ||||||
| 		if rec := recover(); rec != nil { | 		if rec := recover(); rec != nil { | ||||||
| 			err = PanicWrappedErr{panic: rec} | 			err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())} | ||||||
| 		} | 		} | ||||||
| 	}() | 	}() | ||||||
|  |  | ||||||
| @@ -27,7 +30,7 @@ func RunPanicSafe(fn func()) (err error) { | |||||||
| func RunPanicSafeR1(fn func() error) (err error) { | func RunPanicSafeR1(fn func() error) (err error) { | ||||||
| 	defer func() { | 	defer func() { | ||||||
| 		if rec := recover(); rec != nil { | 		if rec := recover(); rec != nil { | ||||||
| 			err = PanicWrappedErr{panic: rec} | 			err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())} | ||||||
| 		} | 		} | ||||||
| 	}() | 	}() | ||||||
|  |  | ||||||
| @@ -38,7 +41,7 @@ func RunPanicSafeR2[T1 any](fn func() (T1, error)) (r1 T1, err error) { | |||||||
| 	defer func() { | 	defer func() { | ||||||
| 		if rec := recover(); rec != nil { | 		if rec := recover(); rec != nil { | ||||||
| 			r1 = *new(T1) | 			r1 = *new(T1) | ||||||
| 			err = PanicWrappedErr{panic: rec} | 			err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())} | ||||||
| 		} | 		} | ||||||
| 	}() | 	}() | ||||||
|  |  | ||||||
| @@ -50,7 +53,7 @@ func RunPanicSafeR3[T1 any, T2 any](fn func() (T1, T2, error)) (r1 T1, r2 T2, er | |||||||
| 		if rec := recover(); rec != nil { | 		if rec := recover(); rec != nil { | ||||||
| 			r1 = *new(T1) | 			r1 = *new(T1) | ||||||
| 			r2 = *new(T2) | 			r2 = *new(T2) | ||||||
| 			err = PanicWrappedErr{panic: rec} | 			err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())} | ||||||
| 		} | 		} | ||||||
| 	}() | 	}() | ||||||
|  |  | ||||||
| @@ -63,7 +66,7 @@ func RunPanicSafeR4[T1 any, T2 any, T3 any](fn func() (T1, T2, T3, error)) (r1 T | |||||||
| 			r1 = *new(T1) | 			r1 = *new(T1) | ||||||
| 			r2 = *new(T2) | 			r2 = *new(T2) | ||||||
| 			r3 = *new(T3) | 			r3 = *new(T3) | ||||||
| 			err = PanicWrappedErr{panic: rec} | 			err = PanicWrappedErr{panic: rec, Stack: string(debug.Stack())} | ||||||
| 		} | 		} | ||||||
| 	}() | 	}() | ||||||
|  |  | ||||||
|   | |||||||
| @@ -10,10 +10,34 @@ var PTrue = Ptr(true) | |||||||
| // PFalse := &false | // PFalse := &false | ||||||
| var PFalse = Ptr(false) | var PFalse = Ptr(false) | ||||||
|  |  | ||||||
|  | // PNil := &nil | ||||||
|  | var PNil = Ptr[any](nil) | ||||||
|  |  | ||||||
// Ptr returns a pointer to (a copy of) the given value.
func Ptr[T any](v T) *T {
	return &v
}
|  |  | ||||||
// DblPtr returns a pointer-to-pointer to (a copy of) the given value.
func DblPtr[T any](v T) **T {
	inner := &v
	return &inner
}
|  |  | ||||||
// DblPtrIfNotNil wraps v in another pointer level, but maps a nil input to a
// nil output (in contrast to DblPtrNil, which yields a non-nil outer pointer
// to a nil inner pointer).
func DblPtrIfNotNil[T any](v *T) **T {
	if v == nil {
		return nil
	}
	// note: this points at the local copy of the parameter v
	return &v
}
|  |  | ||||||
// DblPtrNil returns a non-nil outer pointer whose inner *T is nil.
func DblPtrNil[T any]() **T {
	var inner *T
	return &inner
}
|  |  | ||||||
// ArrPtr collects the variadic arguments into a slice and returns a pointer
// to that slice.
func ArrPtr[T any](v ...T) *[]T {
	return &v
}
|  |  | ||||||
// PtrInt32 returns a pointer to the given int32 value
// (pre-generics helper; equivalent to Ptr[int32]).
func PtrInt32(v int32) *int32 {
	return &v
}
| @@ -35,7 +59,7 @@ func IsNil(i interface{}) bool { | |||||||
| 		return true | 		return true | ||||||
| 	} | 	} | ||||||
| 	switch reflect.TypeOf(i).Kind() { | 	switch reflect.TypeOf(i).Kind() { | ||||||
| 	case reflect.Ptr, reflect.Map, reflect.Array, reflect.Chan, reflect.Slice: | 	case reflect.Ptr, reflect.Map, reflect.Chan, reflect.Slice, reflect.Func, reflect.UnsafePointer: | ||||||
| 		return reflect.ValueOf(i).IsNil() | 		return reflect.ValueOf(i).IsNil() | ||||||
| 	} | 	} | ||||||
| 	return false | 	return false | ||||||
|   | |||||||
| @@ -8,12 +8,28 @@ func Sort[T OrderedConstraint](arr []T) { | |||||||
| 	}) | 	}) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func AsSorted[T OrderedConstraint](arr []T) []T { | ||||||
|  | 	arr = ArrCopy(arr) | ||||||
|  | 	sort.Slice(arr, func(i1, i2 int) bool { | ||||||
|  | 		return arr[i1] < arr[i2] | ||||||
|  | 	}) | ||||||
|  | 	return arr | ||||||
|  | } | ||||||
|  |  | ||||||
// SortStable sorts arr ascending, in place, keeping the relative order of
// equal elements (sort.SliceStable).
func SortStable[T OrderedConstraint](arr []T) {
	sort.SliceStable(arr, func(i1, i2 int) bool {
		return arr[i1] < arr[i2]
	})
}
|  |  | ||||||
|  | func AsSortedStable[T OrderedConstraint](arr []T) []T { | ||||||
|  | 	arr = ArrCopy(arr) | ||||||
|  | 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||||
|  | 		return arr[i1] < arr[i2] | ||||||
|  | 	}) | ||||||
|  | 	return arr | ||||||
|  | } | ||||||
|  |  | ||||||
| func IsSorted[T OrderedConstraint](arr []T) bool { | func IsSorted[T OrderedConstraint](arr []T) bool { | ||||||
| 	return sort.SliceIsSorted(arr, func(i1, i2 int) bool { | 	return sort.SliceIsSorted(arr, func(i1, i2 int) bool { | ||||||
| 		return arr[i1] < arr[i2] | 		return arr[i1] < arr[i2] | ||||||
| @@ -26,12 +42,28 @@ func SortSlice[T any](arr []T, less func(v1, v2 T) bool) { | |||||||
| 	}) | 	}) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func AsSortedSlice[T any](arr []T, less func(v1, v2 T) bool) []T { | ||||||
|  | 	arr = ArrCopy(arr) | ||||||
|  | 	sort.Slice(arr, func(i1, i2 int) bool { | ||||||
|  | 		return less(arr[i1], arr[i2]) | ||||||
|  | 	}) | ||||||
|  | 	return arr | ||||||
|  | } | ||||||
|  |  | ||||||
// SortSliceStable sorts arr in place by the supplied less comparator,
// keeping the relative order of equal elements (sort.SliceStable).
func SortSliceStable[T any](arr []T, less func(v1, v2 T) bool) {
	sort.SliceStable(arr, func(i1, i2 int) bool {
		return less(arr[i1], arr[i2])
	})
}
|  |  | ||||||
|  | func AsSortedSliceStable[T any](arr []T, less func(v1, v2 T) bool) []T { | ||||||
|  | 	arr = ArrCopy(arr) | ||||||
|  | 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||||
|  | 		return less(arr[i1], arr[i2]) | ||||||
|  | 	}) | ||||||
|  | 	return arr | ||||||
|  | } | ||||||
|  |  | ||||||
| func IsSliceSorted[T any](arr []T, less func(v1, v2 T) bool) bool { | func IsSliceSorted[T any](arr []T, less func(v1, v2 T) bool) bool { | ||||||
| 	return sort.SliceIsSorted(arr, func(i1, i2 int) bool { | 	return sort.SliceIsSorted(arr, func(i1, i2 int) bool { | ||||||
| 		return less(arr[i1], arr[i2]) | 		return less(arr[i1], arr[i2]) | ||||||
| @@ -44,12 +76,28 @@ func SortBy[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TEle | |||||||
| 	}) | 	}) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func AsSortedBy[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) []TElem { | ||||||
|  | 	arr = ArrCopy(arr) | ||||||
|  | 	sort.Slice(arr, func(i1, i2 int) bool { | ||||||
|  | 		return selector(arr[i1]) < selector(arr[i2]) | ||||||
|  | 	}) | ||||||
|  | 	return arr | ||||||
|  | } | ||||||
|  |  | ||||||
// SortByStable sorts arr in place, ascending by the selector key, keeping
// the relative order of elements with equal keys (sort.SliceStable).
func SortByStable[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) {
	sort.SliceStable(arr, func(i1, i2 int) bool {
		return selector(arr[i1]) < selector(arr[i2])
	})
}
|  |  | ||||||
|  | func AsSortedByStable[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) []TElem { | ||||||
|  | 	arr = ArrCopy(arr) | ||||||
|  | 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||||
|  | 		return selector(arr[i1]) < selector(arr[i2]) | ||||||
|  | 	}) | ||||||
|  | 	return arr | ||||||
|  | } | ||||||
|  |  | ||||||
| func IsSortedBy[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) { | func IsSortedBy[TElem any, TSel OrderedConstraint](arr []TElem, selector func(v TElem) TSel) { | ||||||
| 	sort.SliceStable(arr, func(i1, i2 int) bool { | 	sort.SliceStable(arr, func(i1, i2 int) bool { | ||||||
| 		return selector(arr[i1]) < selector(arr[i2]) | 		return selector(arr[i1]) < selector(arr[i2]) | ||||||
|   | |||||||
							
								
								
									
										29
									
								
								langext/url.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										29
									
								
								langext/url.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,29 @@ | |||||||
|  | package langext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"fmt" | ||||||
|  | 	"strings" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func BuildUrl(url, path string, params *map[string]string) string { | ||||||
|  | 	if path[:1] == "/" && url[len(url)-1:] == "/" { | ||||||
|  | 		url += path[1:] | ||||||
|  | 	} else if path[:1] != "/" && url[len(url)-1:] != "/" { | ||||||
|  | 		url += "/" + path | ||||||
|  | 	} else { | ||||||
|  | 		url += path | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if params == nil { | ||||||
|  | 		return url | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for key, value := range *params { | ||||||
|  | 		if strings.Contains(url, "?") { | ||||||
|  | 			url += fmt.Sprintf("&%s=%s", key, value) | ||||||
|  | 		} else { | ||||||
|  | 			url += fmt.Sprintf("?%s=%s", key, value) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return url | ||||||
|  | } | ||||||
							
								
								
									
										45
									
								
								langext/url_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										45
									
								
								langext/url_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,45 @@ | |||||||
|  | package langext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func TestBuildUrl(t *testing.T) { | ||||||
|  | 	tests := []struct { | ||||||
|  | 		Url    string | ||||||
|  | 		Path   string | ||||||
|  | 		Params *map[string]string | ||||||
|  | 		Want   string | ||||||
|  | 	}{ | ||||||
|  | 		{ | ||||||
|  | 			Url:    "https://test.heydyno.de/", | ||||||
|  | 			Path:   "/testing-01", | ||||||
|  | 			Params: &map[string]string{"param1": "value1"}, | ||||||
|  | 			Want:   "https://test.heydyno.de/testing-01?param1=value1", | ||||||
|  | 		}, | ||||||
|  | 		{ | ||||||
|  | 			Url:    "https://test.heydyno.de", | ||||||
|  | 			Path:   "testing-01", | ||||||
|  | 			Params: &map[string]string{"param1": "value1"}, | ||||||
|  | 			Want:   "https://test.heydyno.de/testing-01?param1=value1", | ||||||
|  | 		}, | ||||||
|  | 		{ | ||||||
|  | 			Url:    "https://test.heydyno.de", | ||||||
|  | 			Path:   "/testing-01", | ||||||
|  | 			Params: nil, | ||||||
|  | 			Want:   "https://test.heydyno.de/testing-01", | ||||||
|  | 		}, | ||||||
|  | 		{ | ||||||
|  | 			Url:    "https://test.heydyno.de/", | ||||||
|  | 			Path:   "testing-01", | ||||||
|  | 			Params: nil, | ||||||
|  | 			Want:   "https://test.heydyno.de/testing-01", | ||||||
|  | 		}, | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for _, test := range tests { | ||||||
|  | 		res := BuildUrl(test.Url, test.Path, test.Params) | ||||||
|  | 		tst.AssertEqual(t, res, test.Want) | ||||||
|  | 	} | ||||||
|  | } | ||||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user