Compare commits
	
		
			86 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 4832aa9d6c | |||
| 4d606d3131 | |||
| be9b9e8ccf | |||
| 28cdfc5bd2 | |||
| 10a6627323 | |||
| 06b3b4116e | |||
| ff821390f7 | |||
| c8e9c34706 | |||
| b7c48cb467 | |||
| a0a80899f5 | |||
| 3543441b96 | |||
| eef12da4e6 | |||
| d009aafd4e | |||
| f7b4aa48d7 | |||
| 36b092774d | |||
| a8c6e39ac5 | |||
| 62f2ce9268 | |||
| 49375e90f0 | |||
| d8cf255c80 | |||
| b520282ba0 | |||
| 27cc9366b5 | |||
| d9517fe73c | |||
| 8a92a6cc52 | |||
| 9b2028ab54 | |||
| 207fd331d5 | |||
| 54b0d6701d | |||
| fc2657179b | |||
| d4894e31fe | |||
| 0ddfaf666b | |||
| e154137105 | |||
| 9b9a79b4ad | |||
| 5a8d7110e4 | |||
| d47c84cd47 | |||
| c571f3f888 | |||
| e884ba6b89 | |||
| 1a8e31e5ef | |||
| eccc0fe9e5 | |||
| c8dec24a0d | |||
| b8cb989e54 | |||
| ec672fbd49 | |||
| cfb0b53fc7 | |||
| a7389f44fa | |||
| 69f0fedd66 | |||
| 335ef4d8e8 | |||
| 61801ff20d | |||
| 361dca5c85 | |||
| 9f85a243e8 | |||
| dc6cb274ee | |||
| f6b47792a4 | |||
| 295b3ef793 | |||
| 721c176337 | |||
| ebba6545a3 | |||
| 19c7e22ced | |||
| 9f883b458f | |||
| 1f456c5134 | |||
| d7fbef37db | |||
| a1668b6e5a | |||
| 3a17edfaf0 | |||
| 3320a9c19d | |||
| 8dcd8a270a | |||
| 03a9b276d8 | |||
| 9c8cde384f | |||
| 99b000ecf4 | |||
| a173e30090 | |||
| a3481a7d2d | |||
| a8e6f98a89 | |||
| ab805403b9 | |||
| 1e98d351ce | |||
| c40bdc8e9e | |||
| 7204562879 | |||
| 741611a2e1 | |||
| 133aeb8374 | |||
| b78a468632 | |||
| f1b4480e0f | |||
| ffffe4bf24 | |||
| 413bf3c848 | |||
| 646990b549 | |||
| e5818146a8 | |||
| 1310054121 | |||
| 49d423915c | |||
| 1962cb3c52 | |||
| 84f124dd4d | |||
| ff8e066135 | |||
| bc5c61e43d | |||
| 6ded615723 | |||
| abc8af525a | 
							
								
								
									
										70
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										70
									
								
								README.md
									
									
									
									
									
								
							| @@ -8,7 +8,7 @@ This should not have any heavy dependencies (gin, mongo, etc) and add missing ba | ||||
| Potentially needs `export GOPRIVATE="gogs.mikescher.com"` | ||||
|  | ||||
|  | ||||
| ### Packages: | ||||
| ## Packages: | ||||
|  | ||||
| | Name        | Maintainer | Description                                                                                                   | | ||||
| |-------------|------------|---------------------------------------------------------------------------------------------------------------| | ||||
| @@ -20,8 +20,9 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"` | ||||
| | zipext      | Mike       | Utility for zip/gzip/tar etc                                                                                  | | ||||
| | reflectext  | Mike       | Utility for golang reflection                                                                                 | | ||||
| | fsext       | Mike       | Utility for filesystem access                                                                                 | ||||
| | ctxext      | Mike       | Utility for context.Context                                                                                   | | ||||
| |             |            |                                                                                                               | | ||||
| | mongoext    | Mike       | Utility/Helper functions for mongodb                                                                          | | ||||
| | mongoext    | Mike       | Utility/Helper functions for mongodb (kinda abandoned)                                                        | | ||||
| | cursortoken | Mike       | MongoDB cursortoken implementation                                                                            | | ||||
| | pagination  | Mike       | Pagination implementation                                                                                     | | ||||
| |             |            |                                                                                                               | | ||||
| @@ -43,3 +44,68 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"` | ||||
| |             |            |                                                                                                               | | ||||
| | scn         | Mike       | SimpleCloudNotifier                                                                                           | | ||||
| |             |            |                                                                                                               | | ||||
|  | ||||
|  | ||||
|  | ||||
| ## Usage: | ||||
|  | ||||
| ### exerr | ||||
|  | ||||
|  - see **mongoext/builder.go** for full info | ||||
|  | ||||
| Short summary: | ||||
|  - A better error package with metadata, listener, api-output and error-traces | ||||
|  - Initialize with `exerr.Init()` | ||||
|  - *Never* return `err` direct, always use exerr.Wrap(err, "...") - add metadata where applicable | ||||
|  - at the end either Print(), Fatal() or Output() your error (print = stdout, fatal = panic, output = json API response) | ||||
|  - You can add listeners with exerr.RegisterListener(), and save the full errors to a db or smth | ||||
|  | ||||
| ### wmo | ||||
|  | ||||
|  - A typed wrapper around the official mongo-go-driver | ||||
|  - Use `wmo.W[...](...)` to wrap the collections and type-ify them | ||||
|  - The new collections have all the usual methods, but types | ||||
|  - Also they have List() and Paginate() methods for paginated listings (either with a cursortoken or page/limit) | ||||
|  - Register additional hooks with `WithDecodeFunc`, `WithUnmarshalHook`, `WithMarshalHook`, `WithModifyingPipeline`, `WithModifyingPipelineFunc` | ||||
|  - List(), Paginate(), etc support filter interfaces | ||||
|    - Rule(s) of thumb:  | ||||
|      - filter the results in the filter interface | ||||
|      - sort the results in the sort function of the filter interface | ||||
|      - add joins ($lookup's) in the `WithModifyingPipelineFunc`/`WithModifyingPipeline` | ||||
|  | ||||
| #### ginext | ||||
|  | ||||
|  - A wrapper around gin-gonic/gin | ||||
|  - create the gin engine with `ginext.NewEngine` | ||||
|  - Add routes with `engine.Routes()...` | ||||
|    - `.Use(..)` adds a middleware | ||||
|    - `.Group(..)` adds a group | ||||
|    - `.Get().Handle(..)` adds a handler | ||||
|  - Handler return values (in contrast to plain gin) - values implement the `ginext.HTTPResponse` interface | ||||
|  - Every handler starts with something like: | ||||
| ```go  | ||||
| func (handler Handler) CommunityMetricsValues(pctx ginext.PreContext) ginext.HTTPResponse { | ||||
|     type communityURI struct { | ||||
|         Version     string             `uri:"version"` | ||||
|         CommunityID models.CommunityID `uri:"cid"` | ||||
|     } | ||||
|     type body struct { | ||||
|         UserID  models.UserID  `json:"userID"` | ||||
|         EventID models.EventID `json:"eventID"` | ||||
|     } | ||||
|  | ||||
|     var u communityURI | ||||
|     var b body | ||||
|     ctx, gctx, httpErr := pctx.URI(&u).Body(&b).Start() // can have more unmarshaller, like header, form, etc | ||||
|     if httpErr != nil { | ||||
|     	return *httpErr | ||||
|     } | ||||
|     defer ctx.Cancel() | ||||
|  | ||||
|     // do stuff | ||||
| } | ||||
| ``` | ||||
|  | ||||
| #### sq | ||||
|  | ||||
|  - TODO (like mongoext for sqlite/sql databases) | ||||
| @@ -46,7 +46,7 @@ var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_] | ||||
|  | ||||
| var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | ||||
|  | ||||
| var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-.]+"|[0-9]+))\s*(//(?P<comm>.*))?.*$`)) | ||||
| var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-.]*"|[0-9]+))\s*(//(?P<comm>.*))?.*$`)) | ||||
|  | ||||
| var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | ||||
|  | ||||
|   | ||||
							
								
								
									
										27
									
								
								ctxext/getter.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										27
									
								
								ctxext/getter.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,27 @@ | ||||
| package ctxext | ||||
|  | ||||
| import "context" | ||||
|  | ||||
| func Value[T any](ctx context.Context, key any) (T, bool) { | ||||
| 	v := ctx.Value(key) | ||||
| 	if v == nil { | ||||
| 		return *new(T), false | ||||
| 	} | ||||
| 	if tv, ok := v.(T); !ok { | ||||
| 		return *new(T), false | ||||
| 	} else { | ||||
| 		return tv, true | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func ValueOrDefault[T any](ctx context.Context, key any, def T) T { | ||||
| 	v := ctx.Value(key) | ||||
| 	if v == nil { | ||||
| 		return def | ||||
| 	} | ||||
| 	if tv, ok := v.(T); !ok { | ||||
| 		return def | ||||
| 	} else { | ||||
| 		return tv | ||||
| 	} | ||||
| } | ||||
| @@ -6,3 +6,13 @@ const ( | ||||
| 	SortASC  SortDirection = "ASC" | ||||
| 	SortDESC SortDirection = "DESC" | ||||
| ) | ||||
|  | ||||
| func (sd SortDirection) ToMongo() int { | ||||
| 	if sd == SortASC { | ||||
| 		return 1 | ||||
| 	} else if sd == SortDESC { | ||||
| 		return -1 | ||||
| 	} else { | ||||
| 		return 0 | ||||
| 	} | ||||
| } | ||||
|   | ||||
| @@ -3,12 +3,18 @@ package cursortoken | ||||
| import ( | ||||
| 	"encoding/base32" | ||||
| 	"encoding/json" | ||||
| 	"errors" | ||||
| 	"go.mongodb.org/mongo-driver/bson/primitive" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"strconv" | ||||
| 	"strings" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| type CursorToken interface { | ||||
| 	Token() string | ||||
| 	IsStart() bool | ||||
| 	IsEnd() bool | ||||
| } | ||||
|  | ||||
| type Mode string | ||||
|  | ||||
| const ( | ||||
| @@ -24,97 +30,6 @@ type Extra struct { | ||||
| 	PageSize  *int | ||||
| } | ||||
|  | ||||
| type CursorToken struct { | ||||
| 	Mode               Mode | ||||
| 	ValuePrimary       string | ||||
| 	ValueSecondary     string | ||||
| 	Direction          SortDirection | ||||
| 	DirectionSecondary SortDirection | ||||
| 	PageSize           int | ||||
| 	Extra              Extra | ||||
| } | ||||
|  | ||||
| type cursorTokenSerialize struct { | ||||
| 	ValuePrimary       *string        `json:"v1,omitempty"` | ||||
| 	ValueSecondary     *string        `json:"v2,omitempty"` | ||||
| 	Direction          *SortDirection `json:"dir,omitempty"` | ||||
| 	DirectionSecondary *SortDirection `json:"dir2,omitempty"` | ||||
| 	PageSize           *int           `json:"size,omitempty"` | ||||
|  | ||||
| 	ExtraTimestamp *time.Time `json:"ts,omitempty"` | ||||
| 	ExtraId        *string    `json:"id,omitempty"` | ||||
| 	ExtraPage      *int       `json:"pg,omitempty"` | ||||
| 	ExtraPageSize  *int       `json:"sz,omitempty"` | ||||
| } | ||||
|  | ||||
| func Start() CursorToken { | ||||
| 	return CursorToken{ | ||||
| 		Mode:               CTMStart, | ||||
| 		ValuePrimary:       "", | ||||
| 		ValueSecondary:     "", | ||||
| 		Direction:          "", | ||||
| 		DirectionSecondary: "", | ||||
| 		PageSize:           0, | ||||
| 		Extra:              Extra{}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func End() CursorToken { | ||||
| 	return CursorToken{ | ||||
| 		Mode:               CTMEnd, | ||||
| 		ValuePrimary:       "", | ||||
| 		ValueSecondary:     "", | ||||
| 		Direction:          "", | ||||
| 		DirectionSecondary: "", | ||||
| 		PageSize:           0, | ||||
| 		Extra:              Extra{}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (c *CursorToken) Token() string { | ||||
| 	if c.Mode == CTMStart { | ||||
| 		return "@start" | ||||
| 	} | ||||
| 	if c.Mode == CTMEnd { | ||||
| 		return "@end" | ||||
| 	} | ||||
|  | ||||
| 	// We kinda manually implement omitempty for the CursorToken here | ||||
| 	// because omitempty does not work for time.Time and otherwise we would always | ||||
| 	// get weird time values when decoding a token that initially didn't have an Timestamp set | ||||
| 	// For this usecase we treat Unix=0 as an empty timestamp | ||||
|  | ||||
| 	sertok := cursorTokenSerialize{} | ||||
|  | ||||
| 	if c.ValuePrimary != "" { | ||||
| 		sertok.ValuePrimary = &c.ValuePrimary | ||||
| 	} | ||||
| 	if c.ValueSecondary != "" { | ||||
| 		sertok.ValueSecondary = &c.ValueSecondary | ||||
| 	} | ||||
| 	if c.Direction != "" { | ||||
| 		sertok.Direction = &c.Direction | ||||
| 	} | ||||
| 	if c.DirectionSecondary != "" { | ||||
| 		sertok.DirectionSecondary = &c.DirectionSecondary | ||||
| 	} | ||||
| 	if c.PageSize != 0 { | ||||
| 		sertok.PageSize = &c.PageSize | ||||
| 	} | ||||
|  | ||||
| 	sertok.ExtraTimestamp = c.Extra.Timestamp | ||||
| 	sertok.ExtraId = c.Extra.Id | ||||
| 	sertok.ExtraPage = c.Extra.Page | ||||
| 	sertok.ExtraPageSize = c.Extra.PageSize | ||||
|  | ||||
| 	body, err := json.Marshal(sertok) | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
|  | ||||
| 	return "tok_" + base32.StdEncoding.EncodeToString(body) | ||||
| } | ||||
|  | ||||
| func Decode(tok string) (CursorToken, error) { | ||||
| 	if tok == "" { | ||||
| 		return Start(), nil | ||||
| @@ -125,60 +40,56 @@ func Decode(tok string) (CursorToken, error) { | ||||
| 	if strings.ToLower(tok) == "@end" { | ||||
| 		return End(), nil | ||||
| 	} | ||||
|  | ||||
| 	if !strings.HasPrefix(tok, "tok_") { | ||||
| 		return CursorToken{}, errors.New("could not decode token, missing prefix") | ||||
| 	if strings.ToLower(tok) == "$end" { | ||||
| 		return PageEnd(), nil | ||||
| 	} | ||||
| 	if strings.HasPrefix(tok, "$") && len(tok) > 1 { | ||||
| 		n, err := strconv.ParseInt(tok[1:], 10, 64) | ||||
| 		if err != nil { | ||||
| 			return nil, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).WithType(exerr.TypeCursorTokenDecode).Build() | ||||
| 		} | ||||
| 		return Page(int(n)), nil | ||||
| 	} | ||||
|  | ||||
| 	body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):]) | ||||
| 	if err != nil { | ||||
| 		return CursorToken{}, err | ||||
| 	} | ||||
| 	if strings.HasPrefix(tok, "tok_") { | ||||
|  | ||||
| 	var tokenDeserialize cursorTokenSerialize | ||||
| 	err = json.Unmarshal(body, &tokenDeserialize) | ||||
| 	if err != nil { | ||||
| 		return CursorToken{}, err | ||||
| 	} | ||||
| 		body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):]) | ||||
| 		if err != nil { | ||||
| 			return nil, err | ||||
| 		} | ||||
|  | ||||
| 	token := CursorToken{Mode: CTMNormal} | ||||
| 		var tokenDeserialize cursorTokenKeySortSerialize | ||||
| 		err = json.Unmarshal(body, &tokenDeserialize) | ||||
| 		if err != nil { | ||||
| 			return nil, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).WithType(exerr.TypeCursorTokenDecode).Build() | ||||
| 		} | ||||
|  | ||||
| 	if tokenDeserialize.ValuePrimary != nil { | ||||
| 		token.ValuePrimary = *tokenDeserialize.ValuePrimary | ||||
| 	} | ||||
| 	if tokenDeserialize.ValueSecondary != nil { | ||||
| 		token.ValueSecondary = *tokenDeserialize.ValueSecondary | ||||
| 	} | ||||
| 	if tokenDeserialize.Direction != nil { | ||||
| 		token.Direction = *tokenDeserialize.Direction | ||||
| 	} | ||||
| 	if tokenDeserialize.DirectionSecondary != nil { | ||||
| 		token.DirectionSecondary = *tokenDeserialize.DirectionSecondary | ||||
| 	} | ||||
| 	if tokenDeserialize.PageSize != nil { | ||||
| 		token.PageSize = *tokenDeserialize.PageSize | ||||
| 	} | ||||
| 		token := CTKeySort{Mode: CTMNormal} | ||||
|  | ||||
| 	token.Extra.Timestamp = tokenDeserialize.ExtraTimestamp | ||||
| 	token.Extra.Id = tokenDeserialize.ExtraId | ||||
| 	token.Extra.Page = tokenDeserialize.ExtraPage | ||||
| 	token.Extra.PageSize = tokenDeserialize.ExtraPageSize | ||||
| 		if tokenDeserialize.ValuePrimary != nil { | ||||
| 			token.ValuePrimary = *tokenDeserialize.ValuePrimary | ||||
| 		} | ||||
| 		if tokenDeserialize.ValueSecondary != nil { | ||||
| 			token.ValueSecondary = *tokenDeserialize.ValueSecondary | ||||
| 		} | ||||
| 		if tokenDeserialize.Direction != nil { | ||||
| 			token.Direction = *tokenDeserialize.Direction | ||||
| 		} | ||||
| 		if tokenDeserialize.DirectionSecondary != nil { | ||||
| 			token.DirectionSecondary = *tokenDeserialize.DirectionSecondary | ||||
| 		} | ||||
| 		if tokenDeserialize.PageSize != nil { | ||||
| 			token.PageSize = *tokenDeserialize.PageSize | ||||
| 		} | ||||
|  | ||||
| 	return token, nil | ||||
| } | ||||
| 		token.Extra.Timestamp = tokenDeserialize.ExtraTimestamp | ||||
| 		token.Extra.Id = tokenDeserialize.ExtraId | ||||
| 		token.Extra.Page = tokenDeserialize.ExtraPage | ||||
| 		token.Extra.PageSize = tokenDeserialize.ExtraPageSize | ||||
|  | ||||
| 		return token, nil | ||||
|  | ||||
| func (c *CursorToken) ValuePrimaryObjectId() (primitive.ObjectID, bool) { | ||||
| 	if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil { | ||||
| 		return oid, true | ||||
| 	} else { | ||||
| 		return primitive.ObjectID{}, false | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (c *CursorToken) ValueSecondaryObjectId() (primitive.ObjectID, bool) { | ||||
| 	if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil { | ||||
| 		return oid, true | ||||
| 	} else { | ||||
| 		return primitive.ObjectID{}, false | ||||
| 		return nil, exerr.New(exerr.TypeCursorTokenDecode, "could not decode token, missing/unknown prefix").Str("token", tok).Build() | ||||
| 	} | ||||
| } | ||||
|   | ||||
							
								
								
									
										136
									
								
								cursortoken/tokenKeySort.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										136
									
								
								cursortoken/tokenKeySort.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,136 @@ | ||||
| package cursortoken | ||||
|  | ||||
| import ( | ||||
| 	"encoding/base32" | ||||
| 	"encoding/json" | ||||
| 	"go.mongodb.org/mongo-driver/bson/primitive" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
// CTKeySort is a CursorToken implementation for keyset ("seek") style
// pagination: it carries the sort values of the last returned entry
// together with the sort directions and the requested page size.
type CTKeySort struct {
	Mode               Mode          // start / end / normal marker
	ValuePrimary       string        // sort value of the last entry (primary sort key)
	ValueSecondary     string        // sort value of the last entry (secondary sort key)
	Direction          SortDirection // primary sort direction
	DirectionSecondary SortDirection // secondary sort direction
	PageSize           int           // requested page size; 0 = unset
	Extra              Extra         // optional extra payload (timestamp, id, page, page-size)
}

// cursorTokenKeySortSerialize is the compact JSON wire form of CTKeySort.
// Every field is a pointer with omitempty so that unset values are
// dropped from the encoded token (omitempty alone does not work for
// time.Time, hence the manual pointer handling in Token()).
type cursorTokenKeySortSerialize struct {
	ValuePrimary       *string        `json:"v1,omitempty"`
	ValueSecondary     *string        `json:"v2,omitempty"`
	Direction          *SortDirection `json:"dir,omitempty"`
	DirectionSecondary *SortDirection `json:"dir2,omitempty"`
	PageSize           *int           `json:"size,omitempty"`

	ExtraTimestamp *time.Time `json:"ts,omitempty"`
	ExtraId        *string    `json:"id,omitempty"`
	ExtraPage      *int       `json:"pg,omitempty"`
	ExtraPageSize  *int       `json:"sz,omitempty"`
}
|  | ||||
| func NewKeySortToken(valuePrimary string, valueSecondary string, direction SortDirection, directionSecondary SortDirection, pageSize int, extra Extra) CursorToken { | ||||
| 	return CTKeySort{ | ||||
| 		Mode:               CTMNormal, | ||||
| 		ValuePrimary:       valuePrimary, | ||||
| 		ValueSecondary:     valueSecondary, | ||||
| 		Direction:          direction, | ||||
| 		DirectionSecondary: directionSecondary, | ||||
| 		PageSize:           pageSize, | ||||
| 		Extra:              extra, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func Start() CursorToken { | ||||
| 	return CTKeySort{ | ||||
| 		Mode:               CTMStart, | ||||
| 		ValuePrimary:       "", | ||||
| 		ValueSecondary:     "", | ||||
| 		Direction:          "", | ||||
| 		DirectionSecondary: "", | ||||
| 		PageSize:           0, | ||||
| 		Extra:              Extra{}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func End() CursorToken { | ||||
| 	return CTKeySort{ | ||||
| 		Mode:               CTMEnd, | ||||
| 		ValuePrimary:       "", | ||||
| 		ValueSecondary:     "", | ||||
| 		Direction:          "", | ||||
| 		DirectionSecondary: "", | ||||
| 		PageSize:           0, | ||||
| 		Extra:              Extra{}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (c CTKeySort) Token() string { | ||||
|  | ||||
| 	if c.Mode == CTMStart { | ||||
| 		return "@start" | ||||
| 	} | ||||
| 	if c.Mode == CTMEnd { | ||||
| 		return "@end" | ||||
| 	} | ||||
|  | ||||
| 	// We kinda manually implement omitempty for the CursorToken here | ||||
| 	// because omitempty does not work for time.Time and otherwise we would always | ||||
| 	// get weird time values when decoding a token that initially didn't have an Timestamp set | ||||
| 	// For this usecase we treat Unix=0 as an empty timestamp | ||||
|  | ||||
| 	sertok := cursorTokenKeySortSerialize{} | ||||
|  | ||||
| 	if c.ValuePrimary != "" { | ||||
| 		sertok.ValuePrimary = &c.ValuePrimary | ||||
| 	} | ||||
| 	if c.ValueSecondary != "" { | ||||
| 		sertok.ValueSecondary = &c.ValueSecondary | ||||
| 	} | ||||
| 	if c.Direction != "" { | ||||
| 		sertok.Direction = &c.Direction | ||||
| 	} | ||||
| 	if c.DirectionSecondary != "" { | ||||
| 		sertok.DirectionSecondary = &c.DirectionSecondary | ||||
| 	} | ||||
| 	if c.PageSize != 0 { | ||||
| 		sertok.PageSize = &c.PageSize | ||||
| 	} | ||||
|  | ||||
| 	sertok.ExtraTimestamp = c.Extra.Timestamp | ||||
| 	sertok.ExtraId = c.Extra.Id | ||||
| 	sertok.ExtraPage = c.Extra.Page | ||||
| 	sertok.ExtraPageSize = c.Extra.PageSize | ||||
|  | ||||
| 	body, err := json.Marshal(sertok) | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
|  | ||||
| 	return "tok_" + base32.StdEncoding.EncodeToString(body) | ||||
| } | ||||
|  | ||||
// IsEnd reports whether this token marks the end of the result set.
func (c CTKeySort) IsEnd() bool {
	return c.Mode == CTMEnd
}
|  | ||||
// IsStart reports whether this token marks the beginning of the result set.
func (c CTKeySort) IsStart() bool {
	return c.Mode == CTMStart
}
|  | ||||
| func (c CTKeySort) valuePrimaryObjectId() (primitive.ObjectID, bool) { | ||||
| 	if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil { | ||||
| 		return oid, true | ||||
| 	} else { | ||||
| 		return primitive.ObjectID{}, false | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (c CTKeySort) valueSecondaryObjectId() (primitive.ObjectID, bool) { | ||||
| 	if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil { | ||||
| 		return oid, true | ||||
| 	} else { | ||||
| 		return primitive.ObjectID{}, false | ||||
| 	} | ||||
| } | ||||
							
								
								
									
										41
									
								
								cursortoken/tokenPaginate.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										41
									
								
								cursortoken/tokenPaginate.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,41 @@ | ||||
| package cursortoken | ||||
|  | ||||
| import "strconv" | ||||
|  | ||||
| type CTPaginated struct { | ||||
| 	Mode Mode | ||||
| 	Page int | ||||
| } | ||||
|  | ||||
| func Page(p int) CursorToken { | ||||
| 	return CTPaginated{ | ||||
| 		Mode: CTMNormal, | ||||
| 		Page: p, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func PageEnd() CursorToken { | ||||
| 	return CTPaginated{ | ||||
| 		Mode: CTMEnd, | ||||
| 		Page: 0, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (c CTPaginated) Token() string { | ||||
| 	if c.Mode == CTMStart { | ||||
| 		return "$1" | ||||
| 	} | ||||
| 	if c.Mode == CTMEnd { | ||||
| 		return "$end" | ||||
| 	} | ||||
|  | ||||
| 	return "$" + strconv.Itoa(c.Page) | ||||
| } | ||||
|  | ||||
| func (c CTPaginated) IsEnd() bool { | ||||
| 	return c.Mode == CTMEnd | ||||
| } | ||||
|  | ||||
| func (c CTPaginated) IsStart() bool { | ||||
| 	return c.Mode == CTMStart || c.Page == 1 | ||||
| } | ||||
| @@ -115,6 +115,9 @@ func (b *bufferedReadCloser) BufferedAll() ([]byte, error) { | ||||
| 				return nil, err | ||||
| 			} | ||||
| 		} | ||||
| 		if err := b.Reset(); err != nil { | ||||
| 			return nil, err | ||||
| 		} | ||||
| 		return b.buffer, nil | ||||
|  | ||||
| 	case modeSourceFinished: | ||||
| @@ -131,10 +134,22 @@ func (b *bufferedReadCloser) BufferedAll() ([]byte, error) { | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // Reset resets the buffer to the beginning of the buffer. | ||||
| // If the original source is partially read, we will finish reading it and fill our buffer | ||||
| func (b *bufferedReadCloser) Reset() error { | ||||
| 	switch b.mode { | ||||
| 	case modeSourceReading: | ||||
| 		fallthrough | ||||
| 		if b.off == 0 { | ||||
| 			return nil // nobody has read anything yet | ||||
| 		} | ||||
| 		err := b.Close() | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		b.mode = modeBufferReading | ||||
| 		b.off = 0 | ||||
| 		return nil | ||||
|  | ||||
| 	case modeSourceFinished: | ||||
| 		err := b.Close() | ||||
| 		if err != nil { | ||||
|   | ||||
| @@ -3,6 +3,7 @@ package dataext | ||||
| import ( | ||||
| 	"encoding/json" | ||||
| 	"errors" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| ) | ||||
|  | ||||
| type JsonOpt[T any] struct { | ||||
| @@ -10,6 +11,14 @@ type JsonOpt[T any] struct { | ||||
| 	value T | ||||
| } | ||||
|  | ||||
| func NewJsonOpt[T any](v T) JsonOpt[T] { | ||||
| 	return JsonOpt[T]{isSet: true, value: v} | ||||
| } | ||||
|  | ||||
| func EmptyJsonOpt[T any]() JsonOpt[T] { | ||||
| 	return JsonOpt[T]{isSet: false} | ||||
| } | ||||
|  | ||||
| // MarshalJSON returns m as the JSON encoding of m. | ||||
| func (m JsonOpt[T]) MarshalJSON() ([]byte, error) { | ||||
| 	if !m.isSet { | ||||
| @@ -51,9 +60,24 @@ func (m JsonOpt[T]) ValueOrNil() *T { | ||||
| 	return &m.value | ||||
| } | ||||
|  | ||||
| func (m JsonOpt[T]) ValueDblPtrOrNil() **T { | ||||
| 	if !m.isSet { | ||||
| 		return nil | ||||
| 	} | ||||
| 	return langext.DblPtr(m.value) | ||||
| } | ||||
|  | ||||
| func (m JsonOpt[T]) MustValue() T { | ||||
| 	if !m.isSet { | ||||
| 		panic("value not set") | ||||
| 	} | ||||
| 	return m.value | ||||
| } | ||||
|  | ||||
| func (m JsonOpt[T]) IfSet(fn func(v T)) bool { | ||||
| 	if !m.isSet { | ||||
| 		return false | ||||
| 	} | ||||
| 	fn(m.value) | ||||
| 	return true | ||||
| } | ||||
|   | ||||
							
								
								
									
										144
									
								
								dataext/ringBuffer.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										144
									
								
								dataext/ringBuffer.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,144 @@ | ||||
| package dataext | ||||
|  | ||||
| import "iter" | ||||
|  | ||||
| type RingBuffer[T any] struct { | ||||
| 	items    []T // | ||||
| 	capacity int // max number of items the buffer can hold | ||||
| 	size     int // how many items are in the buffer | ||||
| 	head     int // ptr to next item | ||||
| } | ||||
|  | ||||
| func NewRingBuffer[T any](capacity int) *RingBuffer[T] { | ||||
| 	return &RingBuffer[T]{ | ||||
| 		items:    make([]T, capacity), | ||||
| 		capacity: capacity, | ||||
| 		size:     0, | ||||
| 		head:     0, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (rb *RingBuffer[T]) Push(item T) { | ||||
| 	if rb.size < rb.capacity { | ||||
| 		rb.size++ | ||||
| 	} | ||||
| 	rb.items[rb.head] = item | ||||
| 	rb.head = (rb.head + 1) % rb.capacity | ||||
| } | ||||
|  | ||||
| func (rb *RingBuffer[T]) PushPop(item T) *T { | ||||
| 	if rb.size < rb.capacity { | ||||
| 		rb.size++ | ||||
| 		rb.items[rb.head] = item | ||||
| 		rb.head = (rb.head + 1) % rb.capacity | ||||
| 		return nil | ||||
| 	} else { | ||||
| 		prev := rb.items[rb.head] | ||||
| 		rb.items[rb.head] = item | ||||
| 		rb.head = (rb.head + 1) % rb.capacity | ||||
| 		return &prev | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (rb *RingBuffer[T]) Peek() (T, bool) { | ||||
| 	if rb.size == 0 { | ||||
| 		return *new(T), false | ||||
| 	} | ||||
| 	return rb.items[(rb.head-1+rb.capacity)%rb.capacity], true | ||||
| } | ||||
|  | ||||
| func (rb *RingBuffer[T]) Items() []T { | ||||
| 	if rb.size < rb.capacity { | ||||
| 		return rb.items[:rb.size] | ||||
| 	} | ||||
| 	return append(rb.items[rb.head:], rb.items[:rb.head]...) | ||||
| } | ||||
|  | ||||
| func (rb *RingBuffer[T]) Size() int { | ||||
| 	return rb.size | ||||
| } | ||||
|  | ||||
| func (rb *RingBuffer[T]) Capacity() int { | ||||
| 	return rb.capacity | ||||
| } | ||||
|  | ||||
| func (rb *RingBuffer[T]) Clear() { | ||||
| 	rb.size = 0 | ||||
| 	rb.head = 0 | ||||
| } | ||||
|  | ||||
| func (rb *RingBuffer[T]) IsFull() bool { | ||||
| 	return rb.size == rb.capacity | ||||
| } | ||||
|  | ||||
| func (rb *RingBuffer[T]) At(i int) T { | ||||
| 	if i < 0 || i >= rb.size { | ||||
| 		panic("Index out of bounds") | ||||
| 	} | ||||
| 	if rb.size < rb.capacity { | ||||
| 		return rb.items[i] | ||||
| 	} | ||||
| 	return rb.items[(rb.head+i)%rb.capacity] | ||||
| } | ||||
|  | ||||
| func (rb *RingBuffer[T]) Get(i int) (T, bool) { | ||||
| 	if i < 0 || i >= rb.size { | ||||
| 		return *new(T), false | ||||
| 	} | ||||
| 	if rb.size < rb.capacity { | ||||
| 		return rb.items[i], true | ||||
| 	} | ||||
| 	return rb.items[(rb.head+i)%rb.capacity], true | ||||
| } | ||||
|  | ||||
| func (rb *RingBuffer[T]) Iter() iter.Seq[T] { | ||||
| 	return func(yield func(T) bool) { | ||||
| 		for i := 0; i < rb.size; i++ { | ||||
| 			if !yield(rb.At(i)) { | ||||
| 				return | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (rb *RingBuffer[T]) Iter2() iter.Seq2[int, T] { | ||||
| 	return func(yield func(int, T) bool) { | ||||
| 		for i := 0; i < rb.size; i++ { | ||||
| 			if !yield(i, rb.At(i)) { | ||||
| 				return | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (rb *RingBuffer[T]) Remove(fnEqual func(v T) bool) int { | ||||
| 	// Mike [2024-11-13]: I *really* tried to write an in-place algorithm to remove elements | ||||
| 	//                    But after carful consideration, I left that as an exercise for future readers | ||||
| 	//                    It is, suprisingly, non-trivial, especially because the head-ptr must be weirdly updated | ||||
| 	//                    And out At() method does not work correctly with {head<>0 && size<capacity} | ||||
|  | ||||
| 	dc := 0 | ||||
| 	b := make([]T, rb.capacity) | ||||
| 	bsize := 0 | ||||
|  | ||||
| 	for i := 0; i < rb.size; i++ { | ||||
| 		comp := rb.At(i) | ||||
| 		if fnEqual(comp) { | ||||
| 			dc++ | ||||
| 		} else { | ||||
| 			b[bsize] = comp | ||||
| 			bsize++ | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if dc == 0 { | ||||
| 		return 0 | ||||
| 	} | ||||
|  | ||||
| 	rb.items = b | ||||
| 	rb.size = bsize | ||||
| 	rb.head = bsize % rb.capacity | ||||
|  | ||||
| 	return dc | ||||
|  | ||||
| } | ||||
							
								
								
									
										447
									
								
								dataext/ringBuffer_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										447
									
								
								dataext/ringBuffer_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,447 @@ | ||||
| package dataext | ||||
|  | ||||
| import "testing" | ||||
|  | ||||
| func TestRingBufferPushAddsItem(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	if rb.Size() != 1 { | ||||
| 		t.Errorf("Expected size 1, got %d", rb.Size()) | ||||
| 	} | ||||
| 	if item, _ := rb.Peek(); item != 1 { | ||||
| 		t.Errorf("Expected item 1, got %d", item) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferPushPopReturnsOldestItem(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	if item := rb.PushPop(4); item == nil || *item != 1 { | ||||
| 		t.Errorf("Expected item 1, got %v", item) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferPeekReturnsLastPushedItem(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	if item, _ := rb.Peek(); item != 2 { | ||||
| 		t.Errorf("Expected item 2, got %d", item) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferOverflow1(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](5) | ||||
| 	rb.Push(1) // overriden | ||||
| 	rb.Push(2) // overriden | ||||
| 	rb.Push(3) | ||||
| 	rb.Push(9) | ||||
| 	rb.Push(4) | ||||
| 	rb.Push(5) | ||||
| 	rb.Push(7) | ||||
| 	if rb.Size() != 5 { | ||||
| 		t.Errorf("Expected size 4, got %d", rb.Size()) | ||||
| 	} | ||||
| 	expected := []int{3, 9, 4, 5, 7} | ||||
| 	items := rb.Items() | ||||
| 	for i, item := range items { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferItemsReturnsAllItems(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	items := rb.Items() | ||||
| 	expected := []int{1, 2, 3} | ||||
| 	for i, item := range items { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferClearEmptiesBuffer(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Clear() | ||||
| 	if rb.Size() != 0 { | ||||
| 		t.Errorf("Expected size 0, got %d", rb.Size()) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferIsFullReturnsTrueWhenFull(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	if !rb.IsFull() { | ||||
| 		t.Errorf("Expected buffer to be full") | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferAtReturnsCorrectItem(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	if item := rb.At(1); item != 2 { | ||||
| 		t.Errorf("Expected item 2, got %d", item) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferGetReturnsCorrectItem(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	if item, ok := rb.Get(1); !ok || item != 2 { | ||||
| 		t.Errorf("Expected item 2, got %d", item) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferRemoveDeletesMatchingItems(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](5) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(4) | ||||
| 	removed := rb.Remove(func(v int) bool { return v == 2 }) | ||||
| 	if removed != 2 { | ||||
| 		t.Errorf("Expected 2 items removed, got %d", removed) | ||||
| 	} | ||||
| 	if rb.Size() != 3 { | ||||
| 		t.Errorf("Expected size 3, got %d", rb.Size()) | ||||
| 	} | ||||
| 	expected := []int{1, 3, 4} | ||||
| 	items := rb.Items() | ||||
| 	for i, item := range items { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferRemoveDeletesMatchingItems2(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](5) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(4) | ||||
| 	removed := rb.Remove(func(v int) bool { return v == 3 }) | ||||
| 	if removed != 1 { | ||||
| 		t.Errorf("Expected 2 items removed, got %d", removed) | ||||
| 	} | ||||
| 	if rb.Size() != 4 { | ||||
| 		t.Errorf("Expected size 3, got %d", rb.Size()) | ||||
| 	} | ||||
| 	expected := []int{1, 2, 2, 4} | ||||
| 	items := rb.Items() | ||||
| 	for i, item := range items { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferRemoveDeletesMatchingItems3(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](5) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	rb.Push(9) | ||||
| 	rb.Push(4) | ||||
| 	removed := rb.Remove(func(v int) bool { return v == 3 }) | ||||
| 	if removed != 1 { | ||||
| 		t.Errorf("Expected 2 items removed, got %d", removed) | ||||
| 	} | ||||
| 	if rb.Size() != 4 { | ||||
| 		t.Errorf("Expected size 3, got %d", rb.Size()) | ||||
| 	} | ||||
| 	expected := []int{1, 2, 9, 4} | ||||
| 	items := rb.Items() | ||||
| 	for i, item := range items { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferRemoveDeletesMatchingItems4(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](5) | ||||
| 	rb.Push(1) // overriden | ||||
| 	rb.Push(2) // overriden | ||||
| 	rb.Push(3) | ||||
| 	rb.Push(9) | ||||
| 	rb.Push(4) | ||||
| 	rb.Push(5) | ||||
| 	rb.Push(7) | ||||
| 	removed := rb.Remove(func(v int) bool { return v == 7 }) | ||||
| 	if removed != 1 { | ||||
| 		t.Errorf("Expected 1 items removed, got %d", removed) | ||||
| 	} | ||||
| 	if rb.Size() != 4 { | ||||
| 		t.Errorf("Expected size 4, got %d", rb.Size()) | ||||
| 	} | ||||
| 	expected := []int{3, 9, 4, 5} | ||||
| 	items := rb.Items() | ||||
| 	for i, item := range items { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferRemoveDeletesMatchingItems5(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](5) | ||||
| 	rb.Push(1) // overriden | ||||
| 	rb.Push(2) // overriden | ||||
| 	rb.Push(3) | ||||
| 	rb.Push(9) | ||||
| 	rb.Push(4) | ||||
| 	rb.Push(5) | ||||
| 	rb.Push(7) | ||||
| 	removed := rb.Remove(func(v int) bool { return v == 3 }) | ||||
| 	if removed != 1 { | ||||
| 		t.Errorf("Expected 1 items removed, got %d", removed) | ||||
| 	} | ||||
| 	if rb.Size() != 4 { | ||||
| 		t.Errorf("Expected size 4, got %d", rb.Size()) | ||||
| 	} | ||||
| 	expected := []int{9, 4, 5, 7} | ||||
| 	items := rb.Items() | ||||
| 	for i, item := range items { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferRemoveDeletesMatchingItems6(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](5) | ||||
| 	rb.Push(1) // overriden | ||||
| 	rb.Push(2) // overriden | ||||
| 	rb.Push(3) | ||||
| 	rb.Push(9) | ||||
| 	rb.Push(4) | ||||
| 	rb.Push(5) | ||||
| 	rb.Push(7) | ||||
| 	removed := rb.Remove(func(v int) bool { return v == 1 }) | ||||
| 	if removed != 0 { | ||||
| 		t.Errorf("Expected 0 items removed, got %d", removed) | ||||
| 	} | ||||
| 	if rb.Size() != 5 { | ||||
| 		t.Errorf("Expected size 5, got %d", rb.Size()) | ||||
| 	} | ||||
| 	expected := []int{3, 9, 4, 5, 7} | ||||
| 	items := rb.Items() | ||||
| 	for i, item := range items { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 	} | ||||
| 	if !rb.IsFull() { | ||||
| 		t.Errorf("Expected buffer to not be full") | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferRemoveDeletesMatchingItems7(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](5) | ||||
| 	rb.Push(1) // overriden | ||||
| 	rb.Push(2) // overriden | ||||
| 	rb.Push(3) | ||||
| 	rb.Push(9) | ||||
| 	rb.Push(4) | ||||
| 	rb.Push(5) | ||||
| 	rb.Push(7) | ||||
| 	removed := rb.Remove(func(v int) bool { return v == 9 }) | ||||
| 	if removed != 1 { | ||||
| 		t.Errorf("Expected 1 items removed, got %d", removed) | ||||
| 	} | ||||
| 	if rb.Size() != 4 { | ||||
| 		t.Errorf("Expected size 4, got %d", rb.Size()) | ||||
| 	} | ||||
| 	expected := []int{3, 4, 5, 7} | ||||
| 	items := rb.Items() | ||||
| 	for i, item := range items { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 	} | ||||
| 	if rb.IsFull() { | ||||
| 		t.Errorf("Expected buffer to not be full") | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferAddItemsToFullRingBuffer(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	rb.Push(4) | ||||
| 	if rb.Size() != 3 { | ||||
| 		t.Errorf("Expected size 3, got %d", rb.Size()) | ||||
| 	} | ||||
| 	expected := []int{2, 3, 4} | ||||
| 	items := rb.Items() | ||||
| 	for i, item := range items { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferAddItemsToNonFullRingBuffer(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	if rb.Size() != 2 { | ||||
| 		t.Errorf("Expected size 2, got %d", rb.Size()) | ||||
| 	} | ||||
| 	expected := []int{1, 2} | ||||
| 	items := rb.Items() | ||||
| 	for i, item := range items { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferRemoveItemsFromNonFullRingBuffer(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	removed := rb.Remove(func(v int) bool { return v == 1 }) | ||||
| 	if removed != 1 { | ||||
| 		t.Errorf("Expected 1 item removed, got %d", removed) | ||||
| 	} | ||||
| 	if rb.Size() != 1 { | ||||
| 		t.Errorf("Expected size 1, got %d", rb.Size()) | ||||
| 	} | ||||
| 	expected := []int{2} | ||||
| 	items := rb.Items() | ||||
| 	for i, item := range items { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferRemoveItemsFromFullRingBuffer(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	removed := rb.Remove(func(v int) bool { return v == 2 }) | ||||
| 	if removed != 1 { | ||||
| 		t.Errorf("Expected 1 item removed, got %d", removed) | ||||
| 	} | ||||
| 	if rb.Size() != 2 { | ||||
| 		t.Errorf("Expected size 2, got %d", rb.Size()) | ||||
| 	} | ||||
| 	expected := []int{1, 3} | ||||
| 	items := rb.Items() | ||||
| 	for i, item := range items { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferRemoveMultipleItemsFromRingBuffer(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](5) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(4) | ||||
| 	removed := rb.Remove(func(v int) bool { return v == 2 }) | ||||
| 	if removed != 2 { | ||||
| 		t.Errorf("Expected 2 items removed, got %d", removed) | ||||
| 	} | ||||
| 	if rb.Size() != 3 { | ||||
| 		t.Errorf("Expected size 3, got %d", rb.Size()) | ||||
| 	} | ||||
| 	expected := []int{1, 3, 4} | ||||
| 	items := rb.Items() | ||||
| 	for i, item := range items { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferRemoveAllItemsFromRingBuffer(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	removed := rb.Remove(func(v int) bool { return true }) | ||||
| 	if removed != 3 { | ||||
| 		t.Errorf("Expected 3 items removed, got %d", removed) | ||||
| 	} | ||||
| 	if rb.Size() != 0 { | ||||
| 		t.Errorf("Expected size 0, got %d", rb.Size()) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferRemoveNoItemsFromRingBuffer(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	removed := rb.Remove(func(v int) bool { return false }) | ||||
| 	if removed != 0 { | ||||
| 		t.Errorf("Expected 0 items removed, got %d", removed) | ||||
| 	} | ||||
| 	if rb.Size() != 3 { | ||||
| 		t.Errorf("Expected size 3, got %d", rb.Size()) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferIteratesOverAllItems(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	expected := []int{1, 2, 3} | ||||
| 	i := 0 | ||||
| 	for item := range rb.Iter() { | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 		i++ | ||||
| 	} | ||||
| 	if i != len(expected) { | ||||
| 		t.Errorf("Expected to iterate over %d items, but iterated over %d", len(expected), i) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestRingBufferIter2IteratesOverAllItemsWithIndices(t *testing.T) { | ||||
| 	rb := NewRingBuffer[int](3) | ||||
| 	rb.Push(1) | ||||
| 	rb.Push(2) | ||||
| 	rb.Push(3) | ||||
| 	expected := []int{1, 2, 3} | ||||
| 	i := 0 | ||||
| 	for index, item := range rb.Iter2() { | ||||
| 		if index != i { | ||||
| 			t.Errorf("Expected index %d, got %d", i, index) | ||||
| 		} | ||||
| 		if item != expected[i] { | ||||
| 			t.Errorf("Expected item %d, got %d", expected[i], item) | ||||
| 		} | ||||
| 		i++ | ||||
| 	} | ||||
| 	if i != len(expected) { | ||||
| 		t.Errorf("Expected to iterate over %d items, but iterated over %d", len(expected), i) | ||||
| 	} | ||||
| } | ||||
| @@ -7,6 +7,10 @@ type SyncMap[TKey comparable, TData any] struct { | ||||
| 	lock sync.Mutex | ||||
| } | ||||
|  | ||||
| func NewSyncMap[TKey comparable, TData any]() *SyncMap[TKey, TData] { | ||||
| 	return &SyncMap[TKey, TData]{data: make(map[TKey]TData), lock: sync.Mutex{}} | ||||
| } | ||||
|  | ||||
| func (s *SyncMap[TKey, TData]) Set(key TKey, data TData) { | ||||
| 	s.lock.Lock() | ||||
| 	defer s.lock.Unlock() | ||||
|   | ||||
							
								
								
									
										143
									
								
								dataext/syncRingSet.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										143
									
								
								dataext/syncRingSet.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,143 @@ | ||||
| package dataext | ||||
|  | ||||
| import "sync" | ||||
|  | ||||
// SyncRingSet is a mutex-guarded set with bounded capacity:
// when full, adding a new value evicts the oldest one (ring semantics).
type SyncRingSet[TData comparable] struct {
	data map[TData]bool // membership index over the ring's current contents
	lock sync.Mutex
	ring *RingBuffer[TData] // insertion order; drives eviction of the oldest value
}
|  | ||||
| func NewSyncRingSet[TData comparable](capacity int) *SyncRingSet[TData] { | ||||
| 	return &SyncRingSet[TData]{ | ||||
| 		data: make(map[TData]bool, capacity+1), | ||||
| 		lock: sync.Mutex{}, | ||||
| 		ring: NewRingBuffer[TData](capacity), | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // Add adds `value` to the set | ||||
| // returns true  if the value was actually inserted (value did not exist beforehand) | ||||
| // returns false if the value already existed | ||||
| func (s *SyncRingSet[TData]) Add(value TData) bool { | ||||
| 	s.lock.Lock() | ||||
| 	defer s.lock.Unlock() | ||||
|  | ||||
| 	if s.data == nil { | ||||
| 		s.data = make(map[TData]bool) | ||||
| 	} | ||||
|  | ||||
| 	_, existsInPreState := s.data[value] | ||||
| 	if existsInPreState { | ||||
| 		return false | ||||
| 	} | ||||
|  | ||||
| 	prev := s.ring.PushPop(value) | ||||
|  | ||||
| 	s.data[value] = true | ||||
| 	if prev != nil { | ||||
| 		delete(s.data, *prev) | ||||
| 	} | ||||
|  | ||||
| 	return true | ||||
| } | ||||
|  | ||||
| func (s *SyncRingSet[TData]) AddAll(values []TData) { | ||||
| 	s.lock.Lock() | ||||
| 	defer s.lock.Unlock() | ||||
|  | ||||
| 	if s.data == nil { | ||||
| 		s.data = make(map[TData]bool) | ||||
| 	} | ||||
|  | ||||
| 	for _, value := range values { | ||||
| 		_, existsInPreState := s.data[value] | ||||
| 		if existsInPreState { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		prev := s.ring.PushPop(value) | ||||
|  | ||||
| 		s.data[value] = true | ||||
| 		if prev != nil { | ||||
| 			delete(s.data, *prev) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (s *SyncRingSet[TData]) Remove(value TData) bool { | ||||
| 	s.lock.Lock() | ||||
| 	defer s.lock.Unlock() | ||||
|  | ||||
| 	if s.data == nil { | ||||
| 		s.data = make(map[TData]bool) | ||||
| 	} | ||||
|  | ||||
| 	_, existsInPreState := s.data[value] | ||||
| 	if !existsInPreState { | ||||
| 		return false | ||||
| 	} | ||||
|  | ||||
| 	delete(s.data, value) | ||||
| 	s.ring.Remove(func(v TData) bool { return value == v }) | ||||
|  | ||||
| 	return true | ||||
| } | ||||
|  | ||||
| func (s *SyncRingSet[TData]) RemoveAll(values []TData) { | ||||
| 	s.lock.Lock() | ||||
| 	defer s.lock.Unlock() | ||||
|  | ||||
| 	if s.data == nil { | ||||
| 		s.data = make(map[TData]bool) | ||||
| 	} | ||||
|  | ||||
| 	for _, value := range values { | ||||
| 		delete(s.data, value) | ||||
| 		s.ring.Remove(func(v TData) bool { return value == v }) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (s *SyncRingSet[TData]) Contains(value TData) bool { | ||||
| 	s.lock.Lock() | ||||
| 	defer s.lock.Unlock() | ||||
|  | ||||
| 	if s.data == nil { | ||||
| 		s.data = make(map[TData]bool) | ||||
| 	} | ||||
|  | ||||
| 	_, ok := s.data[value] | ||||
|  | ||||
| 	return ok | ||||
| } | ||||
|  | ||||
| func (s *SyncRingSet[TData]) Get() []TData { | ||||
| 	s.lock.Lock() | ||||
| 	defer s.lock.Unlock() | ||||
|  | ||||
| 	if s.data == nil { | ||||
| 		s.data = make(map[TData]bool) | ||||
| 	} | ||||
|  | ||||
| 	r := make([]TData, 0, len(s.data)) | ||||
|  | ||||
| 	for k := range s.data { | ||||
| 		r = append(r, k) | ||||
| 	} | ||||
|  | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| // AddIfNotContains | ||||
| // returns true  if the value was actually added (value did not exist beforehand) | ||||
| // returns false if the value already existed | ||||
| func (s *SyncRingSet[TData]) AddIfNotContains(key TData) bool { | ||||
| 	return s.Add(key) | ||||
| } | ||||
|  | ||||
| // RemoveIfContains | ||||
| // returns true  if the value was actually removed (value did exist beforehand) | ||||
| // returns false if the value did not exist in the set | ||||
| func (s *SyncRingSet[TData]) RemoveIfContains(key TData) bool { | ||||
| 	return s.Remove(key) | ||||
| } | ||||
| @@ -7,8 +7,12 @@ type SyncSet[TData comparable] struct { | ||||
| 	lock sync.Mutex | ||||
| } | ||||
|  | ||||
| func NewSyncSet[TData comparable]() *SyncSet[TData] { | ||||
| 	return &SyncSet[TData]{data: make(map[TData]bool), lock: sync.Mutex{}} | ||||
| } | ||||
|  | ||||
| // Add adds `value` to the set | ||||
| // returns true  if the value was actually inserted | ||||
| // returns true  if the value was actually inserted (value did not exist beforehand) | ||||
| // returns false if the value already existed | ||||
| func (s *SyncSet[TData]) Add(value TData) bool { | ||||
| 	s.lock.Lock() | ||||
| @@ -19,9 +23,12 @@ func (s *SyncSet[TData]) Add(value TData) bool { | ||||
| 	} | ||||
|  | ||||
| 	_, existsInPreState := s.data[value] | ||||
| 	s.data[value] = true | ||||
| 	if existsInPreState { | ||||
| 		return false | ||||
| 	} | ||||
|  | ||||
| 	return !existsInPreState | ||||
| 	s.data[value] = true | ||||
| 	return true | ||||
| } | ||||
|  | ||||
| func (s *SyncSet[TData]) AddAll(values []TData) { | ||||
| @@ -37,6 +44,36 @@ func (s *SyncSet[TData]) AddAll(values []TData) { | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (s *SyncSet[TData]) Remove(value TData) bool { | ||||
| 	s.lock.Lock() | ||||
| 	defer s.lock.Unlock() | ||||
|  | ||||
| 	if s.data == nil { | ||||
| 		s.data = make(map[TData]bool) | ||||
| 	} | ||||
|  | ||||
| 	_, existsInPreState := s.data[value] | ||||
| 	if !existsInPreState { | ||||
| 		return false | ||||
| 	} | ||||
|  | ||||
| 	delete(s.data, value) | ||||
| 	return true | ||||
| } | ||||
|  | ||||
| func (s *SyncSet[TData]) RemoveAll(values []TData) { | ||||
| 	s.lock.Lock() | ||||
| 	defer s.lock.Unlock() | ||||
|  | ||||
| 	if s.data == nil { | ||||
| 		s.data = make(map[TData]bool) | ||||
| 	} | ||||
|  | ||||
| 	for _, value := range values { | ||||
| 		delete(s.data, value) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (s *SyncSet[TData]) Contains(value TData) bool { | ||||
| 	s.lock.Lock() | ||||
| 	defer s.lock.Unlock() | ||||
| @@ -66,3 +103,17 @@ func (s *SyncSet[TData]) Get() []TData { | ||||
|  | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| // AddIfNotContains | ||||
| // returns true  if the value was actually added (value did not exist beforehand) | ||||
| // returns false if the value already existed | ||||
| func (s *SyncSet[TData]) AddIfNotContains(key TData) bool { | ||||
| 	return s.Add(key) | ||||
| } | ||||
|  | ||||
| // RemoveIfContains | ||||
| // returns true  if the value was actually removed (value did exist beforehand) | ||||
| // returns false if the value did not exist in the set | ||||
| func (s *SyncSet[TData]) RemoveIfContains(key TData) bool { | ||||
| 	return s.Remove(key) | ||||
| } | ||||
|   | ||||
| @@ -19,6 +19,14 @@ func (s Single[T1]) TupleValues() []any { | ||||
| 	return []any{s.V1} | ||||
| } | ||||
|  | ||||
| func NewSingle[T1 any](v1 T1) Single[T1] { | ||||
| 	return Single[T1]{V1: v1} | ||||
| } | ||||
|  | ||||
| func NewTuple1[T1 any](v1 T1) Single[T1] { | ||||
| 	return Single[T1]{V1: v1} | ||||
| } | ||||
|  | ||||
| // ---------------------------------------------------------------------------- | ||||
|  | ||||
| type Tuple[T1 any, T2 any] struct { | ||||
| @@ -34,6 +42,14 @@ func (t Tuple[T1, T2]) TupleValues() []any { | ||||
| 	return []any{t.V1, t.V2} | ||||
| } | ||||
|  | ||||
| func NewTuple[T1 any, T2 any](v1 T1, v2 T2) Tuple[T1, T2] { | ||||
| 	return Tuple[T1, T2]{V1: v1, V2: v2} | ||||
| } | ||||
|  | ||||
| func NewTuple2[T1 any, T2 any](v1 T1, v2 T2) Tuple[T1, T2] { | ||||
| 	return Tuple[T1, T2]{V1: v1, V2: v2} | ||||
| } | ||||
|  | ||||
| // ---------------------------------------------------------------------------- | ||||
|  | ||||
| type Triple[T1 any, T2 any, T3 any] struct { | ||||
| @@ -50,6 +66,14 @@ func (t Triple[T1, T2, T3]) TupleValues() []any { | ||||
| 	return []any{t.V1, t.V2, t.V3} | ||||
| } | ||||
|  | ||||
| func NewTriple[T1 any, T2 any, T3 any](v1 T1, v2 T2, v3 T3) Triple[T1, T2, T3] { | ||||
| 	return Triple[T1, T2, T3]{V1: v1, V2: v2, V3: v3} | ||||
| } | ||||
|  | ||||
| func NewTuple3[T1 any, T2 any, T3 any](v1 T1, v2 T2, v3 T3) Triple[T1, T2, T3] { | ||||
| 	return Triple[T1, T2, T3]{V1: v1, V2: v2, V3: v3} | ||||
| } | ||||
|  | ||||
| // ---------------------------------------------------------------------------- | ||||
|  | ||||
| type Quadruple[T1 any, T2 any, T3 any, T4 any] struct { | ||||
| @@ -67,6 +91,14 @@ func (t Quadruple[T1, T2, T3, T4]) TupleValues() []any { | ||||
| 	return []any{t.V1, t.V2, t.V3, t.V4} | ||||
| } | ||||
|  | ||||
| func NewQuadruple[T1 any, T2 any, T3 any, T4 any](v1 T1, v2 T2, v3 T3, v4 T4) Quadruple[T1, T2, T3, T4] { | ||||
| 	return Quadruple[T1, T2, T3, T4]{V1: v1, V2: v2, V3: v3, V4: v4} | ||||
| } | ||||
|  | ||||
| func NewTuple4[T1 any, T2 any, T3 any, T4 any](v1 T1, v2 T2, v3 T3, v4 T4) Quadruple[T1, T2, T3, T4] { | ||||
| 	return Quadruple[T1, T2, T3, T4]{V1: v1, V2: v2, V3: v3, V4: v4} | ||||
| } | ||||
|  | ||||
| // ---------------------------------------------------------------------------- | ||||
|  | ||||
| type Quintuple[T1 any, T2 any, T3 any, T4 any, T5 any] struct { | ||||
| @@ -86,6 +118,14 @@ func (t Quintuple[T1, T2, T3, T4, T5]) TupleValues() []any { | ||||
|  | ||||
| } | ||||
|  | ||||
| func NewQuintuple[T1 any, T2 any, T3 any, T4 any, T5 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5) Quintuple[T1, T2, T3, T4, T5] { | ||||
| 	return Quintuple[T1, T2, T3, T4, T5]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5} | ||||
| } | ||||
|  | ||||
| func NewTuple5[T1 any, T2 any, T3 any, T4 any, T5 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5) Quintuple[T1, T2, T3, T4, T5] { | ||||
| 	return Quintuple[T1, T2, T3, T4, T5]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5} | ||||
| } | ||||
|  | ||||
| // ---------------------------------------------------------------------------- | ||||
|  | ||||
| type Sextuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any] struct { | ||||
| @@ -106,6 +146,14 @@ func (t Sextuple[T1, T2, T3, T4, T5, T6]) TupleValues() []any { | ||||
|  | ||||
| } | ||||
|  | ||||
| func NewSextuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6) Sextuple[T1, T2, T3, T4, T5, T6] { | ||||
| 	return Sextuple[T1, T2, T3, T4, T5, T6]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6} | ||||
| } | ||||
|  | ||||
| func NewTuple6[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6) Sextuple[T1, T2, T3, T4, T5, T6] { | ||||
| 	return Sextuple[T1, T2, T3, T4, T5, T6]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6} | ||||
| } | ||||
|  | ||||
| // ---------------------------------------------------------------------------- | ||||
|  | ||||
| type Septuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any] struct { | ||||
| @@ -126,6 +174,14 @@ func (t Septuple[T1, T2, T3, T4, T5, T6, T7]) TupleValues() []any { | ||||
| 	return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7} | ||||
| } | ||||
|  | ||||
| func NewSeptuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7) Septuple[T1, T2, T3, T4, T5, T6, T7] { | ||||
| 	return Septuple[T1, T2, T3, T4, T5, T6, T7]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7} | ||||
| } | ||||
|  | ||||
| func NewTuple7[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7) Septuple[T1, T2, T3, T4, T5, T6, T7] { | ||||
| 	return Septuple[T1, T2, T3, T4, T5, T6, T7]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7} | ||||
| } | ||||
|  | ||||
| // ---------------------------------------------------------------------------- | ||||
|  | ||||
| type Octuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any] struct { | ||||
| @@ -147,6 +203,14 @@ func (t Octuple[T1, T2, T3, T4, T5, T6, T7, T8]) TupleValues() []any { | ||||
| 	return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8} | ||||
| } | ||||
|  | ||||
| func NewOctuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8) Octuple[T1, T2, T3, T4, T5, T6, T7, T8] { | ||||
| 	return Octuple[T1, T2, T3, T4, T5, T6, T7, T8]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8} | ||||
| } | ||||
|  | ||||
| func NewTuple8[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8) Octuple[T1, T2, T3, T4, T5, T6, T7, T8] { | ||||
| 	return Octuple[T1, T2, T3, T4, T5, T6, T7, T8]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8} | ||||
| } | ||||
|  | ||||
| // ---------------------------------------------------------------------------- | ||||
|  | ||||
| type Nonuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any] struct { | ||||
| @@ -168,3 +232,10 @@ func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleLength() int { | ||||
| func (t Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]) TupleValues() []any { | ||||
| 	return []any{t.V1, t.V2, t.V3, t.V4, t.V5, t.V6, t.V7, t.V8, t.V9} | ||||
| } | ||||
|  | ||||
| func NewNonuple[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8, v9 T9) Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9] { | ||||
| 	return Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8, V9: v9} | ||||
| } | ||||
| func NewTuple9[T1 any, T2 any, T3 any, T4 any, T5 any, T6 any, T7 any, T8 any, T9 any](v1 T1, v2 T2, v3 T3, v4 T4, v5 T5, v6 T6, v7 T7, v8 T8, v9 T9) Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9] { | ||||
| 	return Nonuple[T1, T2, T3, T4, T5, T6, T7, T8, T9]{V1: v1, V2: v2, V3: v3, V4: v4, V5: v5, V6: v6, V7: v7, V8: v8, V9: v9} | ||||
| } | ||||
|   | ||||
| @@ -30,6 +30,10 @@ import ( | ||||
| // If possible add metadata to the error (eg the id that was not found, ...), the methods are the same as in zerolog | ||||
| //     return nil, exerror.Wrap(err, "do something failed").Str("someid", id).Int("count", in.Count).Build() | ||||
| // | ||||
| // You can also add extra-data to an error with Extra(..) | ||||
| // In contrast to metadata, extra-data is always printed in the resulting error; it is intended for additional (programmatically readable) data complementing the errortype | ||||
| // (metadata is more internal debug info/help) | ||||
| // | ||||
| // You can change the errortype with `.User()` and `.System()` (User-errors are 400 and System-errors 500) | ||||
| // You can also manually set the statuscode with `.WithStatuscode(http.NotFound)` | ||||
| // You can set the type with `WithType(..)` | ||||
| @@ -76,12 +80,14 @@ func Wrap(err error, msg string) *Builder { | ||||
| 		return &Builder{errorData: newExErr(CatSystem, TypeInternal, msg)} // prevent NPE if we call Wrap with err==nil | ||||
| 	} | ||||
|  | ||||
| 	v := FromError(err) | ||||
|  | ||||
| 	if !pkgconfig.RecursiveErrors { | ||||
| 		v := FromError(err) | ||||
| 		v.Message = msg | ||||
| 		return &Builder{wrappedErr: err, errorData: v} | ||||
| 	} else { | ||||
| 		return &Builder{wrappedErr: err, errorData: wrapExErr(v, msg, CatWrap, 1)} | ||||
| 	} | ||||
| 	return &Builder{wrappedErr: err, errorData: wrapExErr(FromError(err), msg, CatWrap, 1)} | ||||
| } | ||||
|  | ||||
| // ---------------------------------------------------------------------------- | ||||
| @@ -368,29 +374,6 @@ func (b *Builder) CtxData(method Method, ctx context.Context) *Builder { | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func formatHeader(header map[string][]string) string { | ||||
| 	ml := 1 | ||||
| 	for k, _ := range header { | ||||
| 		if len(k) > ml { | ||||
| 			ml = len(k) | ||||
| 		} | ||||
| 	} | ||||
| 	r := "" | ||||
| 	for k, v := range header { | ||||
| 		if r != "" { | ||||
| 			r += "\n" | ||||
| 		} | ||||
| 		for _, hval := range v { | ||||
| 			value := hval | ||||
| 			value = strings.ReplaceAll(value, "\n", "\\n") | ||||
| 			value = strings.ReplaceAll(value, "\r", "\\r") | ||||
| 			value = strings.ReplaceAll(value, "\t", "\\t") | ||||
| 			r += langext.StrPadRight(k, " ", ml) + " := " + value | ||||
| 		} | ||||
| 	} | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| func extractHeader(header map[string][]string) []string { | ||||
| 	r := make([]string, 0, len(header)) | ||||
| 	for k, v := range header { | ||||
| @@ -407,6 +390,16 @@ func extractHeader(header map[string][]string) []string { | ||||
|  | ||||
| // ---------------------------------------------------------------------------- | ||||
|  | ||||
// Extra adds additional data to the error.
// This is not like the other metadata (Id(), Str(), etc):
// extra-data is public and will be printed/outputted with the error.
// NOTE(review): this writes into b.errorData.Extra without a nil check —
// presumably the map is initialized when the ExErr is created; confirm,
// otherwise this panics on a nil map.
func (b *Builder) Extra(key string, val any) *Builder {
	b.errorData.Extra[key] = val
	return b
}
|  | ||||
| // ---------------------------------------------------------------------------- | ||||
|  | ||||
| // Build creates a new error, ready to pass up the stack | ||||
| // If the errors is not SevWarn or SevInfo it gets also logged (in short form, without stacktrace) onto stdout | ||||
| // Can be gloablly configured with ZeroLogErrTraces and ZeroLogAllTraces | ||||
| @@ -457,7 +450,7 @@ func (b *Builder) Output(ctx context.Context, g *gin.Context) { | ||||
|  | ||||
| // Print prints the error | ||||
| // If the error is SevErr we also send it to the error-service | ||||
| func (b *Builder) Print(ctxs ...context.Context) { | ||||
| func (b *Builder) Print(ctxs ...context.Context) Proxy { | ||||
| 	warnOnPkgConfigNotInitialized() | ||||
|  | ||||
| 	for _, dctx := range ctxs { | ||||
| @@ -468,9 +461,15 @@ func (b *Builder) Print(ctxs ...context.Context) { | ||||
| 		b.errorData.Log(pkgconfig.ZeroLogger.Error()) | ||||
| 	} else if b.errorData.Severity == SevWarn { | ||||
| 		b.errorData.ShortLog(pkgconfig.ZeroLogger.Warn()) | ||||
| 	} else if b.errorData.Severity == SevInfo { | ||||
| 		b.errorData.ShortLog(pkgconfig.ZeroLogger.Info()) | ||||
| 	} else { | ||||
| 		b.errorData.ShortLog(pkgconfig.ZeroLogger.Debug()) | ||||
| 	} | ||||
|  | ||||
| 	b.errorData.CallListener(MethodPrint) | ||||
|  | ||||
| 	return Proxy{v: *b.errorData} // we return Proxy<Exerr> here instead of Exerr to prevent warnings on ignored err-returns | ||||
| } | ||||
|  | ||||
| func (b *Builder) Format(level LogPrintLevel) string { | ||||
|   | ||||
| @@ -12,11 +12,78 @@ import ( | ||||
| var reflectTypeStr = reflect.TypeOf("") | ||||
|  | ||||
| func FromError(err error) *ExErr { | ||||
|  | ||||
| 	if err == nil { | ||||
| 		// prevent NPE if we call FromError with err==nil | ||||
| 		return &ExErr{ | ||||
| 			UniqueID:       newID(), | ||||
| 			Category:       CatForeign, | ||||
| 			Type:           TypeInternal, | ||||
| 			Severity:       SevErr, | ||||
| 			Timestamp:      time.Time{}, | ||||
| 			StatusCode:     nil, | ||||
| 			Message:        "", | ||||
| 			WrappedErrType: "nil", | ||||
| 			WrappedErr:     err, | ||||
| 			Caller:         "", | ||||
| 			OriginalError:  nil, | ||||
| 			Meta:           make(MetaMap), | ||||
| 			Extra:          make(map[string]any), | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	//goland:noinspection GoTypeAssertionOnErrors | ||||
| 	if verr, ok := err.(*ExErr); ok { | ||||
| 		// A simple ExErr | ||||
| 		return verr | ||||
| 	} | ||||
|  | ||||
| 	//goland:noinspection GoTypeAssertionOnErrors | ||||
| 	if verr, ok := err.(langext.PanicWrappedErr); ok { | ||||
| 		return &ExErr{ | ||||
| 			UniqueID:       newID(), | ||||
| 			Category:       CatForeign, | ||||
| 			Type:           TypePanic, | ||||
| 			Severity:       SevErr, | ||||
| 			Timestamp:      time.Time{}, | ||||
| 			StatusCode:     nil, | ||||
| 			Message:        "A panic occured", | ||||
| 			WrappedErrType: fmt.Sprintf("%T", verr), | ||||
| 			WrappedErr:     err, | ||||
| 			Caller:         "", | ||||
| 			OriginalError:  nil, | ||||
| 			Meta: MetaMap{ | ||||
| 				"panic_object": {DataType: MDTString, Value: fmt.Sprintf("%+v", verr.RecoveredObj())}, | ||||
| 				"panic_type":   {DataType: MDTString, Value: fmt.Sprintf("%T", verr.RecoveredObj())}, | ||||
| 				"stack":        {DataType: MDTString, Value: verr.Stack}, | ||||
| 			}, | ||||
| 			Extra: make(map[string]any), | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	//goland:noinspection GoTypeAssertionOnErrors | ||||
| 	if verr, ok := err.(*langext.PanicWrappedErr); ok && verr != nil { | ||||
| 		return &ExErr{ | ||||
| 			UniqueID:       newID(), | ||||
| 			Category:       CatForeign, | ||||
| 			Type:           TypePanic, | ||||
| 			Severity:       SevErr, | ||||
| 			Timestamp:      time.Time{}, | ||||
| 			StatusCode:     nil, | ||||
| 			Message:        "A panic occured", | ||||
| 			WrappedErrType: fmt.Sprintf("%T", verr), | ||||
| 			WrappedErr:     err, | ||||
| 			Caller:         "", | ||||
| 			OriginalError:  nil, | ||||
| 			Meta: MetaMap{ | ||||
| 				"panic_object": {DataType: MDTString, Value: fmt.Sprintf("%+v", verr.RecoveredObj())}, | ||||
| 				"panic_type":   {DataType: MDTString, Value: fmt.Sprintf("%T", verr.RecoveredObj())}, | ||||
| 				"stack":        {DataType: MDTString, Value: verr.Stack}, | ||||
| 			}, | ||||
| 			Extra: make(map[string]any), | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	// A foreign error (eg a MongoDB exception) | ||||
| 	return &ExErr{ | ||||
| 		UniqueID:       newID(), | ||||
| @@ -31,6 +98,7 @@ func FromError(err error) *ExErr { | ||||
| 		Caller:         "", | ||||
| 		OriginalError:  nil, | ||||
| 		Meta:           getForeignMeta(err), | ||||
| 		Extra:          make(map[string]any), | ||||
| 	} | ||||
| } | ||||
|  | ||||
| @@ -48,6 +116,7 @@ func newExErr(cat ErrorCategory, errtype ErrorType, msg string) *ExErr { | ||||
| 		Caller:         callername(2), | ||||
| 		OriginalError:  nil, | ||||
| 		Meta:           make(map[string]MetaValue), | ||||
| 		Extra:          make(map[string]any), | ||||
| 	} | ||||
| } | ||||
|  | ||||
| @@ -65,6 +134,7 @@ func wrapExErr(e *ExErr, msg string, cat ErrorCategory, stacktraceskip int) *ExE | ||||
| 		Caller:         callername(1 + stacktraceskip), | ||||
| 		OriginalError:  e, | ||||
| 		Meta:           make(map[string]MetaValue), | ||||
| 		Extra:          langext.CopyMap(langext.ForceMap(e.Extra)), | ||||
| 	} | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -23,6 +23,7 @@ var ( | ||||
| 	TypeInternal       = NewType("INTERNAL_ERROR", langext.Ptr(500)) | ||||
| 	TypePanic          = NewType("PANIC", langext.Ptr(500)) | ||||
| 	TypeNotImplemented = NewType("NOT_IMPLEMENTED", langext.Ptr(500)) | ||||
| 	TypeAssert         = NewType("ASSERT", langext.Ptr(500)) | ||||
|  | ||||
| 	TypeMongoQuery        = NewType("MONGO_QUERY", langext.Ptr(500)) | ||||
| 	TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500)) | ||||
|   | ||||
| @@ -1,6 +1,7 @@ | ||||
| package exerr | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"github.com/rs/xid" | ||||
| 	"github.com/rs/zerolog" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| @@ -26,7 +27,8 @@ type ExErr struct { | ||||
|  | ||||
| 	OriginalError *ExErr `json:"originalError"` | ||||
|  | ||||
| 	Meta MetaMap `json:"meta"` | ||||
| 	Extra map[string]any `json:"extra"` | ||||
| 	Meta  MetaMap        `json:"meta"` | ||||
| } | ||||
|  | ||||
| func (ee *ExErr) Error() string { | ||||
| @@ -36,6 +38,13 @@ func (ee *ExErr) Error() string { | ||||
| // Unwrap must be implemented so that some error.XXX methods work | ||||
| func (ee *ExErr) Unwrap() error { | ||||
| 	if ee.OriginalError == nil { | ||||
|  | ||||
| 		if ee.WrappedErr != nil { | ||||
| 			if werr, ok := ee.WrappedErr.(error); ok { | ||||
| 				return werr | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		return nil // this is neccessary - otherwise we return a wrapped nil and the `x == nil` comparison fails (= panic in errors.Is and other failures) | ||||
| 	} | ||||
| 	return ee.OriginalError | ||||
| @@ -81,6 +90,23 @@ func (ee *ExErr) Log(evt *zerolog.Event) { | ||||
| } | ||||
|  | ||||
| func (ee *ExErr) FormatLog(lvl LogPrintLevel) string { | ||||
|  | ||||
| 	// [LogPrintShort] | ||||
| 	// | ||||
| 	// - Only print message and type | ||||
| 	// - Used e.g. for logging to the console when Build is called | ||||
| 	// - also used in Print() if level == Warn/Info | ||||
| 	// | ||||
| 	// [LogPrintOverview] | ||||
| 	// | ||||
| 	// - print message, extra and errortrace | ||||
| 	// | ||||
| 	// [LogPrintFull] | ||||
| 	// | ||||
| 	// - print full error, with meta and extra, and trace, etc | ||||
| 	// - Used in Output() and Print() | ||||
| 	// | ||||
|  | ||||
| 	if lvl == LogPrintShort { | ||||
|  | ||||
| 		msg := ee.Message | ||||
| @@ -101,6 +127,10 @@ func (ee *ExErr) FormatLog(lvl LogPrintLevel) string { | ||||
|  | ||||
| 		str := "[" + ee.RecursiveType().Key + "] <" + ee.UniqueID + "> " + strings.ReplaceAll(ee.RecursiveMessage(), "\n", " ") + "\n" | ||||
|  | ||||
| 		for exk, exv := range ee.Extra { | ||||
| 			str += fmt.Sprintf(" # [[[ %s ==> %v ]]]\n", exk, exv) | ||||
| 		} | ||||
|  | ||||
| 		indent := "" | ||||
| 		for curr := ee; curr != nil; curr = curr.OriginalError { | ||||
| 			indent += "  " | ||||
| @@ -122,12 +152,16 @@ func (ee *ExErr) FormatLog(lvl LogPrintLevel) string { | ||||
|  | ||||
| 		str := "[" + ee.RecursiveType().Key + "] <" + ee.UniqueID + "> " + strings.ReplaceAll(ee.RecursiveMessage(), "\n", " ") + "\n" | ||||
|  | ||||
| 		for exk, exv := range ee.Extra { | ||||
| 			str += fmt.Sprintf(" # [[[ %s ==> %v ]]]\n", exk, exv) | ||||
| 		} | ||||
|  | ||||
| 		indent := "" | ||||
| 		for curr := ee; curr != nil; curr = curr.OriginalError { | ||||
| 			indent += "  " | ||||
|  | ||||
| 			etype := ee.Type.Key | ||||
| 			if ee.Type == TypeWrap { | ||||
| 			etype := curr.Type.Key | ||||
| 			if curr.Type == TypeWrap { | ||||
| 				etype = "~" | ||||
| 			} | ||||
|  | ||||
| @@ -171,7 +205,7 @@ func (ee *ExErr) ShortLog(evt *zerolog.Event) { | ||||
| } | ||||
|  | ||||
| // RecursiveMessage returns the message to show | ||||
| // = first error (top-down) that is not wrapping/foreign/empty | ||||
| // = first error (top-down) that is not foreign/empty | ||||
| // = lowest level error (that is not empty) | ||||
| // = fallback to self.message | ||||
| func (ee *ExErr) RecursiveMessage() string { | ||||
| @@ -179,7 +213,7 @@ func (ee *ExErr) RecursiveMessage() string { | ||||
| 	// ==== [1] ==== first error (top-down) that is not wrapping/foreign/empty | ||||
|  | ||||
| 	for curr := ee; curr != nil; curr = curr.OriginalError { | ||||
| 		if curr.Message != "" && curr.Category != CatWrap && curr.Category != CatForeign { | ||||
| 		if curr.Message != "" && curr.Category != CatForeign { | ||||
| 			return curr.Message | ||||
| 		} | ||||
| 	} | ||||
| @@ -328,6 +362,22 @@ func (ee *ExErr) GetMetaTime(key string) (time.Time, bool) { | ||||
| 	return time.Time{}, false | ||||
| } | ||||
|  | ||||
| func (ee *ExErr) GetExtra(key string) (any, bool) { | ||||
| 	if v, ok := ee.Extra[key]; ok { | ||||
| 		return v, true | ||||
| 	} | ||||
|  | ||||
| 	return nil, false | ||||
| } | ||||
|  | ||||
| func (ee *ExErr) UniqueIDs() []string { | ||||
| 	ids := []string{ee.UniqueID} | ||||
| 	for curr := ee; curr != nil; curr = curr.OriginalError { | ||||
| 		ids = append(ids, curr.UniqueID) | ||||
| 	} | ||||
| 	return ids | ||||
| } | ||||
|  | ||||
| // contains test if the supplied error is contained in this error (anywhere in the chain) | ||||
| func (ee *ExErr) contains(original *ExErr) (*ExErr, bool) { | ||||
| 	if original == nil { | ||||
|   | ||||
							
								
								
									
										20
									
								
								exerr/gin.go
									
									
									
									
									
								
							
							
						
						
									
										20
									
								
								exerr/gin.go
									
									
									
									
									
								
							| @@ -48,6 +48,12 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la | ||||
| 			metaJson[metaKey] = metaVal.rawValueForJson() | ||||
| 		} | ||||
| 		ginJson["meta"] = metaJson | ||||
|  | ||||
| 		extraJson := langext.H{} | ||||
| 		for extraKey, extraVal := range ee.Extra { | ||||
| 			extraJson[extraKey] = extraVal | ||||
| 		} | ||||
| 		ginJson["extra"] = extraJson | ||||
| 	} | ||||
|  | ||||
| 	if applyExtendListener { | ||||
| @@ -90,6 +96,20 @@ func (ee *ExErr) ToAPIJson(applyExtendListener bool, includeWrappedErrors bool, | ||||
| 		apiOutput["__data"] = ee.toJson(0, applyExtendListener, includeMetaFields) | ||||
| 	} | ||||
|  | ||||
| 	for exkey, exval := range ee.Extra { | ||||
|  | ||||
| 		// ensure we do not override existing values | ||||
| 		for { | ||||
| 			if _, ok := apiOutput[exkey]; ok { | ||||
| 				exkey = "_" + exkey | ||||
| 			} else { | ||||
| 				break | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		apiOutput[exkey] = exval | ||||
| 	} | ||||
|  | ||||
| 	if applyExtendListener { | ||||
| 		pkgconfig.ExtendGinOutput(ee, apiOutput) | ||||
| 	} | ||||
|   | ||||
| @@ -86,3 +86,41 @@ func MessageMatch(e error, matcher func(string) bool) bool { | ||||
|  | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| // OriginalError returns the lowest level error, probably the original/external error that was originally wrapped | ||||
| func OriginalError(e error) error { | ||||
| 	if e == nil { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	//goland:noinspection GoTypeAssertionOnErrors | ||||
| 	bmerr, ok := e.(*ExErr) | ||||
| 	for !ok { | ||||
| 		return e | ||||
| 	} | ||||
|  | ||||
| 	for bmerr.OriginalError != nil { | ||||
| 		bmerr = bmerr.OriginalError | ||||
| 	} | ||||
|  | ||||
| 	if bmerr.WrappedErr != nil { | ||||
| 		if werr, ok := bmerr.WrappedErr.(error); ok { | ||||
| 			return werr | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return bmerr | ||||
| } | ||||
|  | ||||
| func UniqueID(v error) *string { | ||||
| 	if v == nil { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	//goland:noinspection GoTypeAssertionOnErrors | ||||
| 	if verr, ok := v.(*ExErr); ok { | ||||
| 		return &verr.UniqueID | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|   | ||||
							
								
								
									
										13
									
								
								exerr/proxy.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								exerr/proxy.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,13 @@ | ||||
| package exerr | ||||
|  | ||||
| type Proxy struct { | ||||
| 	v ExErr | ||||
| } | ||||
|  | ||||
| func (p *Proxy) UniqueID() string { | ||||
| 	return p.v.UniqueID | ||||
| } | ||||
|  | ||||
| func (p *Proxy) Get() ExErr { | ||||
| 	return p.v | ||||
| } | ||||
| @@ -3,13 +3,17 @@ package ginext | ||||
| import ( | ||||
| 	"github.com/gin-gonic/gin" | ||||
| 	"net/http" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| func CorsMiddleware() gin.HandlerFunc { | ||||
| func CorsMiddleware(allowheader []string, exposeheader []string) gin.HandlerFunc { | ||||
| 	return func(c *gin.Context) { | ||||
| 		c.Writer.Header().Set("Access-Control-Allow-Origin", "*") | ||||
| 		c.Writer.Header().Set("Access-Control-Allow-Credentials", "true") | ||||
| 		c.Writer.Header().Set("Access-Control-Allow-Headers", "Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization, accept, origin, Cache-Control, X-Requested-With") | ||||
| 		c.Writer.Header().Set("Access-Control-Allow-Headers", strings.Join(allowheader, ", ")) | ||||
| 		if len(exposeheader) > 0 { | ||||
| 			c.Writer.Header().Set("Access-Control-Expose-Headers", strings.Join(exposeheader, ", ")) | ||||
| 		} | ||||
| 		c.Writer.Header().Set("Access-Control-Allow-Methods", "OPTIONS, GET, POST, PUT, PATCH, DELETE, COUNT") | ||||
|  | ||||
| 		if c.Request.Method == "OPTIONS" { | ||||
|   | ||||
| @@ -21,12 +21,16 @@ type GinWrapper struct { | ||||
|  | ||||
| 	opt                   Options | ||||
| 	allowCors             bool | ||||
| 	corsAllowHeader       []string | ||||
| 	corsExposeHeader      []string | ||||
| 	ginDebug              bool | ||||
| 	bufferBody            bool | ||||
| 	requestTimeout        time.Duration | ||||
| 	listenerBeforeRequest []func(g *gin.Context) | ||||
| 	listenerAfterRequest  []func(g *gin.Context, resp HTTPResponse) | ||||
|  | ||||
| 	buildRequestBindError func(g *gin.Context, fieldtype string, err error) HTTPResponse | ||||
|  | ||||
| 	routeSpecs []ginRouteSpec | ||||
| } | ||||
|  | ||||
| @@ -38,15 +42,18 @@ type ginRouteSpec struct { | ||||
| } | ||||
|  | ||||
| type Options struct { | ||||
| 	AllowCors                *bool                                     // Add cors handler to allow all CORS requests on the default http methods | ||||
| 	GinDebug                 *bool                                     // Set gin.debug to true (adds more logs) | ||||
| 	SuppressGinLogs          *bool                                     // Suppress our custom gin logs (even if GinDebug == true) | ||||
| 	BufferBody               *bool                                     // Buffers the input body stream, this way the ginext error handler can later include the whole request body | ||||
| 	Timeout                  *time.Duration                            // The default handler timeout | ||||
| 	ListenerBeforeRequest    []func(g *gin.Context)                    // Register listener that are called before the handler method | ||||
| 	ListenerAfterRequest     []func(g *gin.Context, resp HTTPResponse) // Register listener that are called after the handler method | ||||
| 	DebugTrimHandlerPrefixes []string                                  // Trim these prefixes from the handler names in the debug print | ||||
| 	DebugReplaceHandlerNames map[string]string                         // Replace handler names in debug output | ||||
| 	AllowCors                *bool                                                          // Add cors handler to allow all CORS requests on the default http methods | ||||
| 	CorsAllowHeader          *[]string                                                      // override the default values of Access-Control-Allow-Headers (AllowCors must be true) | ||||
| 	CorsExposeHeader         *[]string                                                      // return Access-Control-Expose-Headers (AllowCors must be true) | ||||
| 	GinDebug                 *bool                                                          // Set gin.debug to true (adds more logs) | ||||
| 	SuppressGinLogs          *bool                                                          // Suppress our custom gin logs (even if GinDebug == true) | ||||
| 	BufferBody               *bool                                                          // Buffers the input body stream, this way the ginext error handler can later include the whole request body | ||||
| 	Timeout                  *time.Duration                                                 // The default handler timeout | ||||
| 	ListenerBeforeRequest    []func(g *gin.Context)                                         // Register listener that are called before the handler method | ||||
| 	ListenerAfterRequest     []func(g *gin.Context, resp HTTPResponse)                      // Register listener that are called after the handler method | ||||
| 	DebugTrimHandlerPrefixes []string                                                       // Trim these prefixes from the handler names in the debug print | ||||
| 	DebugReplaceHandlerNames map[string]string                                              // Replace handler names in debug output | ||||
| 	BuildRequestBindError    func(g *gin.Context, fieldtype string, err error) HTTPResponse // Override function which generates the HTTPResponse errors that are returned by the preContext..Start() methids | ||||
| } | ||||
|  | ||||
| // NewEngine creates a new (wrapped) ginEngine | ||||
| @@ -72,18 +79,21 @@ func NewEngine(opt Options) *GinWrapper { | ||||
| 		opt:                   opt, | ||||
| 		suppressGinLogs:       langext.Coalesce(opt.SuppressGinLogs, false), | ||||
| 		allowCors:             langext.Coalesce(opt.AllowCors, false), | ||||
| 		corsAllowHeader:       langext.Coalesce(opt.CorsAllowHeader, []string{"Content-Type", "Content-Length", "Accept-Encoding", "X-CSRF-Token", "Authorization", "accept", "origin", "Cache-Control", "X-Requested-With"}), | ||||
| 		corsExposeHeader:      langext.Coalesce(opt.CorsExposeHeader, []string{}), | ||||
| 		ginDebug:              ginDebug, | ||||
| 		bufferBody:            langext.Coalesce(opt.BufferBody, false), | ||||
| 		requestTimeout:        langext.Coalesce(opt.Timeout, 24*time.Hour), | ||||
| 		listenerBeforeRequest: opt.ListenerBeforeRequest, | ||||
| 		listenerAfterRequest:  opt.ListenerAfterRequest, | ||||
| 		buildRequestBindError: langext.Conditional(opt.BuildRequestBindError == nil, defaultBuildRequestBindError, opt.BuildRequestBindError), | ||||
| 	} | ||||
|  | ||||
| 	engine.RedirectFixedPath = false | ||||
| 	engine.RedirectTrailingSlash = false | ||||
|  | ||||
| 	if wrapper.allowCors { | ||||
| 		engine.Use(CorsMiddleware()) | ||||
| 		engine.Use(CorsMiddleware(wrapper.corsAllowHeader, wrapper.corsExposeHeader)) | ||||
| 	} | ||||
|  | ||||
| 	if ginDebug && !wrapper.suppressGinLogs { | ||||
| @@ -222,3 +232,7 @@ func (w *GinWrapper) ForwardRequest(writer http.ResponseWriter, req *http.Reques | ||||
| func (w *GinWrapper) ListRoutes() []gin.RouteInfo { | ||||
| 	return w.engine.Routes() | ||||
| } | ||||
|  | ||||
| func defaultBuildRequestBindError(g *gin.Context, fieldtype string, err error) HTTPResponse { | ||||
| 	return Error(err) | ||||
| } | ||||
|   | ||||
							
								
								
									
										9
									
								
								ginext/jsonFilter.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										9
									
								
								ginext/jsonFilter.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,9 @@ | ||||
| package ginext | ||||
|  | ||||
| import "github.com/gin-gonic/gin" | ||||
|  | ||||
| var jsonFilterKey = "goext.jsonfilter" | ||||
|  | ||||
| func SetJSONFilter(g *gin.Context, filter string) { | ||||
| 	g.Set(jsonFilterKey, filter) | ||||
| } | ||||
| @@ -15,16 +15,17 @@ import ( | ||||
| ) | ||||
|  | ||||
| type PreContext struct { | ||||
| 	ginCtx         *gin.Context | ||||
| 	wrapper        *GinWrapper | ||||
| 	uri            any | ||||
| 	query          any | ||||
| 	body           any | ||||
| 	rawbody        *[]byte | ||||
| 	form           any | ||||
| 	header         any | ||||
| 	timeout        *time.Duration | ||||
| 	persistantData *preContextData // must be a ptr, so that we can get the values back in out Wrap func | ||||
| 	ginCtx                 *gin.Context | ||||
| 	wrapper                *GinWrapper | ||||
| 	uri                    any | ||||
| 	query                  any | ||||
| 	body                   any | ||||
| 	rawbody                *[]byte | ||||
| 	form                   any | ||||
| 	header                 any | ||||
| 	timeout                *time.Duration | ||||
| 	persistantData         *preContextData // must be a ptr, so that we can get the values back in out Wrap func | ||||
| 	ignoreWrongContentType bool | ||||
| } | ||||
|  | ||||
| type preContextData struct { | ||||
| @@ -71,6 +72,11 @@ func (pctx *PreContext) WithSession(sessionObj SessionObject) *PreContext { | ||||
| 	return pctx | ||||
| } | ||||
|  | ||||
| func (pctx *PreContext) IgnoreWrongContentType() *PreContext { | ||||
| 	pctx.ignoreWrongContentType = true | ||||
| 	return pctx | ||||
| } | ||||
|  | ||||
| func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | ||||
| 	if pctx.uri != nil { | ||||
| 		if err := pctx.ginCtx.ShouldBindUri(pctx.uri); err != nil { | ||||
| @@ -78,7 +84,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | ||||
| 				WithType(exerr.TypeBindFailURI). | ||||
| 				Str("struct_type", fmt.Sprintf("%T", pctx.uri)). | ||||
| 				Build() | ||||
| 			return nil, nil, langext.Ptr(Error(err)) | ||||
| 			return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "URI", err)) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -88,24 +94,37 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | ||||
| 				WithType(exerr.TypeBindFailQuery). | ||||
| 				Str("struct_type", fmt.Sprintf("%T", pctx.query)). | ||||
| 				Build() | ||||
| 			return nil, nil, langext.Ptr(Error(err)) | ||||
| 			return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "QUERY", err)) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if pctx.body != nil { | ||||
| 		if pctx.ginCtx.ContentType() == "application/json" { | ||||
| 			if brc, ok := pctx.body.(dataext.BufferedReadCloser); ok { | ||||
| 				// Ensures a fully reset (offset=0) buffer before parsing | ||||
| 				err := brc.Reset() | ||||
| 				if err != nil { | ||||
| 					err = exerr.Wrap(err, "Failed to read (brc.reset) json-body"). | ||||
| 						WithType(exerr.TypeBindFailJSON). | ||||
| 						Str("struct_type", fmt.Sprintf("%T", pctx.body)). | ||||
| 						Build() | ||||
| 					return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "JSON", err)) | ||||
| 				} | ||||
| 			} | ||||
| 			if err := pctx.ginCtx.ShouldBindJSON(pctx.body); err != nil { | ||||
| 				err = exerr.Wrap(err, "Failed to read json-body"). | ||||
| 					WithType(exerr.TypeBindFailJSON). | ||||
| 					Str("struct_type", fmt.Sprintf("%T", pctx.body)). | ||||
| 					Build() | ||||
| 				return nil, nil, langext.Ptr(Error(err)) | ||||
| 				return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "JSON", err)) | ||||
| 			} | ||||
| 		} else { | ||||
| 			err := exerr.New(exerr.TypeBindFailJSON, "missing JSON body"). | ||||
| 				Str("struct_type", fmt.Sprintf("%T", pctx.body)). | ||||
| 				Build() | ||||
| 			return nil, nil, langext.Ptr(Error(err)) | ||||
| 			if !pctx.ignoreWrongContentType { | ||||
| 				err := exerr.New(exerr.TypeBindFailJSON, "missing JSON body"). | ||||
| 					Str("struct_type", fmt.Sprintf("%T", pctx.body)). | ||||
| 					Build() | ||||
| 				return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "JSON", err)) | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -113,14 +132,14 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | ||||
| 		if brc, ok := pctx.ginCtx.Request.Body.(dataext.BufferedReadCloser); ok { | ||||
| 			v, err := brc.BufferedAll() | ||||
| 			if err != nil { | ||||
| 				return nil, nil, langext.Ptr(Error(err)) | ||||
| 				return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "BODY", err)) | ||||
| 			} | ||||
| 			*pctx.rawbody = v | ||||
| 		} else { | ||||
| 			buf := &bytes.Buffer{} | ||||
| 			_, err := io.Copy(buf, pctx.ginCtx.Request.Body) | ||||
| 			if err != nil { | ||||
| 				return nil, nil, langext.Ptr(Error(err)) | ||||
| 				return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "BODY", err)) | ||||
| 			} | ||||
| 			*pctx.rawbody = buf.Bytes() | ||||
| 		} | ||||
| @@ -133,7 +152,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | ||||
| 					WithType(exerr.TypeBindFailFormData). | ||||
| 					Str("struct_type", fmt.Sprintf("%T", pctx.form)). | ||||
| 					Build() | ||||
| 				return nil, nil, langext.Ptr(Error(err)) | ||||
| 				return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "FORM", err)) | ||||
| 			} | ||||
| 		} else if pctx.ginCtx.ContentType() == "application/x-www-form-urlencoded" { | ||||
| 			if err := pctx.ginCtx.ShouldBindWith(pctx.form, binding.Form); err != nil { | ||||
| @@ -141,13 +160,15 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | ||||
| 					WithType(exerr.TypeBindFailFormData). | ||||
| 					Str("struct_type", fmt.Sprintf("%T", pctx.form)). | ||||
| 					Build() | ||||
| 				return nil, nil, langext.Ptr(Error(err)) | ||||
| 				return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "FORM", err)) | ||||
| 			} | ||||
| 		} else { | ||||
| 			err := exerr.New(exerr.TypeBindFailFormData, "missing form body"). | ||||
| 				Str("struct_type", fmt.Sprintf("%T", pctx.form)). | ||||
| 				Build() | ||||
| 			return nil, nil, langext.Ptr(Error(err)) | ||||
| 			if !pctx.ignoreWrongContentType { | ||||
| 				err := exerr.New(exerr.TypeBindFailFormData, "missing form body"). | ||||
| 					Str("struct_type", fmt.Sprintf("%T", pctx.form)). | ||||
| 					Build() | ||||
| 				return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "FORM", err)) | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -157,7 +178,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | ||||
| 				WithType(exerr.TypeBindFailHeader). | ||||
| 				Str("struct_type", fmt.Sprintf("%T", pctx.query)). | ||||
| 				Build() | ||||
| 			return nil, nil, langext.Ptr(Error(err)) | ||||
| 			return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "HEADER", err)) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -169,7 +190,7 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | ||||
| 		err := pctx.persistantData.sessionObj.Init(pctx.ginCtx, actx) | ||||
| 		if err != nil { | ||||
| 			actx.Cancel() | ||||
| 			return nil, nil, langext.Ptr(Error(exerr.Wrap(err, "Failed to init session").Build())) | ||||
| 			return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "INIT", err)) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
|   | ||||
| @@ -36,6 +36,12 @@ type InspectableHTTPResponse interface { | ||||
| 	Headers() []string | ||||
| } | ||||
|  | ||||
| type HTTPErrorResponse interface { | ||||
| 	HTTPResponse | ||||
|  | ||||
| 	Error() error | ||||
| } | ||||
|  | ||||
| func NotImplemented() HTTPResponse { | ||||
| 	return Error(exerr.New(exerr.TypeNotImplemented, "").Build()) | ||||
| } | ||||
|   | ||||
| @@ -7,17 +7,21 @@ import ( | ||||
| ) | ||||
|  | ||||
| type jsonHTTPResponse struct { | ||||
| 	statusCode int | ||||
| 	data       any | ||||
| 	headers    []headerval | ||||
| 	cookies    []cookieval | ||||
| 	statusCode     int | ||||
| 	data           any | ||||
| 	headers        []headerval | ||||
| 	cookies        []cookieval | ||||
| 	filterOverride *string | ||||
| } | ||||
|  | ||||
| func (j jsonHTTPResponse) jsonRenderer(g *gin.Context) json.GoJsonRender { | ||||
| 	var f *string | ||||
| 	if jsonfilter := g.GetString("goext.jsonfilter"); jsonfilter != "" { | ||||
| 	if jsonfilter := g.GetString(jsonFilterKey); jsonfilter != "" { | ||||
| 		f = &jsonfilter | ||||
| 	} | ||||
| 	if j.filterOverride != nil { | ||||
| 		f = j.filterOverride | ||||
| 	} | ||||
| 	return json.GoJsonRender{Data: j.data, NilSafeSlices: true, NilSafeMaps: true, Filter: f} | ||||
| } | ||||
|  | ||||
| @@ -68,3 +72,7 @@ func (j jsonHTTPResponse) Headers() []string { | ||||
| func JSON(sc int, data any) HTTPResponse { | ||||
| 	return &jsonHTTPResponse{statusCode: sc, data: data} | ||||
| } | ||||
|  | ||||
| func JSONWithFilter(sc int, data any, f string) HTTPResponse { | ||||
| 	return &jsonHTTPResponse{statusCode: sc, data: data, filterOverride: &f} | ||||
| } | ||||
|   | ||||
| @@ -13,6 +13,10 @@ type jsonAPIErrResponse struct { | ||||
| 	cookies []cookieval | ||||
| } | ||||
|  | ||||
| func (j jsonAPIErrResponse) Error() error { | ||||
| 	return j.err | ||||
| } | ||||
|  | ||||
| func (j jsonAPIErrResponse) Write(g *gin.Context) { | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
|   | ||||
| @@ -57,7 +57,7 @@ func (w *GinRoutesWrapper) Use(middleware ...gin.HandlerFunc) *GinRoutesWrapper | ||||
| } | ||||
|  | ||||
| func (w *GinRoutesWrapper) WithJSONFilter(filter string) *GinRoutesWrapper { | ||||
| 	return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) }) | ||||
| 	return w.Use(func(g *gin.Context) { g.Set(jsonFilterKey, filter) }) | ||||
| } | ||||
|  | ||||
| func (w *GinRoutesWrapper) GET(relativePath string) *GinRouteBuilder { | ||||
| @@ -112,7 +112,7 @@ func (w *GinRouteBuilder) Use(middleware ...gin.HandlerFunc) *GinRouteBuilder { | ||||
| } | ||||
|  | ||||
| func (w *GinRouteBuilder) WithJSONFilter(filter string) *GinRouteBuilder { | ||||
| 	return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) }) | ||||
| 	return w.Use(func(g *gin.Context) { g.Set(jsonFilterKey, filter) }) | ||||
| } | ||||
|  | ||||
| func (w *GinRouteBuilder) Handle(handler WHandlerFunc) { | ||||
|   | ||||
							
								
								
									
										50
									
								
								go.mod
									
									
									
									
									
								
							
							
						
						
									
										50
									
								
								go.mod
									
									
									
									
									
								
							| @@ -1,63 +1,61 @@ | ||||
| module gogs.mikescher.com/BlackForestBytes/goext | ||||
|  | ||||
| go 1.22 | ||||
| go 1.23 | ||||
|  | ||||
| require ( | ||||
| 	github.com/gin-gonic/gin v1.10.0 | ||||
| 	github.com/glebarez/go-sqlite v1.22.0 // only needed for tests -.- | ||||
| 	github.com/jmoiron/sqlx v1.4.0 | ||||
| 	github.com/rs/xid v1.5.0 | ||||
| 	github.com/rs/xid v1.6.0 | ||||
| 	github.com/rs/zerolog v1.33.0 | ||||
| 	go.mongodb.org/mongo-driver v1.16.0 | ||||
| 	golang.org/x/crypto v0.24.0 | ||||
| 	golang.org/x/sys v0.21.0 | ||||
| 	golang.org/x/term v0.21.0 | ||||
| 	go.mongodb.org/mongo-driver v1.17.2 | ||||
| 	golang.org/x/crypto v0.32.0 | ||||
| 	golang.org/x/sys v0.29.0 | ||||
| 	golang.org/x/term v0.28.0 | ||||
| ) | ||||
|  | ||||
| require ( | ||||
| 	github.com/disintegration/imaging v1.6.2 | ||||
| 	github.com/jung-kurt/gofpdf v1.16.2 | ||||
| 	golang.org/x/sync v0.7.0 | ||||
| 	golang.org/x/sync v0.10.0 | ||||
| ) | ||||
|  | ||||
| require ( | ||||
| 	github.com/bytedance/sonic v1.11.9 // indirect | ||||
| 	github.com/bytedance/sonic/loader v0.1.1 // indirect | ||||
| 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect | ||||
| 	github.com/chenzhuoyu/iasm v0.9.1 // indirect | ||||
| 	github.com/cloudwego/base64x v0.1.4 // indirect | ||||
| 	github.com/bytedance/sonic v1.12.8 // indirect | ||||
| 	github.com/bytedance/sonic/loader v0.2.3 // indirect | ||||
| 	github.com/cloudwego/base64x v0.1.5 // indirect | ||||
| 	github.com/cloudwego/iasm v0.2.0 // indirect | ||||
| 	github.com/dustin/go-humanize v1.0.1 // indirect | ||||
| 	github.com/gabriel-vasile/mimetype v1.4.4 // indirect | ||||
| 	github.com/gin-contrib/sse v0.1.0 // indirect | ||||
| 	github.com/gabriel-vasile/mimetype v1.4.8 // indirect | ||||
| 	github.com/gin-contrib/sse v1.0.0 // indirect | ||||
| 	github.com/go-playground/locales v0.14.1 // indirect | ||||
| 	github.com/go-playground/universal-translator v0.18.1 // indirect | ||||
| 	github.com/go-playground/validator/v10 v10.22.0 // indirect | ||||
| 	github.com/goccy/go-json v0.10.3 // indirect | ||||
| 	github.com/go-playground/validator/v10 v10.24.0 // indirect | ||||
| 	github.com/goccy/go-json v0.10.5 // indirect | ||||
| 	github.com/golang/snappy v0.0.4 // indirect | ||||
| 	github.com/google/uuid v1.5.0 // indirect | ||||
| 	github.com/json-iterator/go v1.1.12 // indirect | ||||
| 	github.com/klauspost/compress v1.17.9 // indirect | ||||
| 	github.com/klauspost/cpuid/v2 v2.2.8 // indirect | ||||
| 	github.com/klauspost/compress v1.17.11 // indirect | ||||
| 	github.com/klauspost/cpuid/v2 v2.2.9 // indirect | ||||
| 	github.com/leodido/go-urn v1.4.0 // indirect | ||||
| 	github.com/mattn/go-colorable v0.1.13 // indirect | ||||
| 	github.com/mattn/go-colorable v0.1.14 // indirect | ||||
| 	github.com/mattn/go-isatty v0.0.20 // indirect | ||||
| 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect | ||||
| 	github.com/modern-go/reflect2 v1.0.2 // indirect | ||||
| 	github.com/montanaflynn/stats v0.7.1 // indirect | ||||
| 	github.com/pelletier/go-toml/v2 v2.2.2 // indirect | ||||
| 	github.com/pelletier/go-toml/v2 v2.2.3 // indirect | ||||
| 	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect | ||||
| 	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect | ||||
| 	github.com/ugorji/go/codec v1.2.12 // indirect | ||||
| 	github.com/xdg-go/pbkdf2 v1.0.0 // indirect | ||||
| 	github.com/xdg-go/scram v1.1.2 // indirect | ||||
| 	github.com/xdg-go/stringprep v1.0.4 // indirect | ||||
| 	github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76 // indirect | ||||
| 	golang.org/x/arch v0.8.0 // indirect | ||||
| 	golang.org/x/image v0.18.0 // indirect | ||||
| 	golang.org/x/net v0.26.0 // indirect | ||||
| 	golang.org/x/text v0.16.0 // indirect | ||||
| 	google.golang.org/protobuf v1.34.2 // indirect | ||||
| 	github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect | ||||
| 	golang.org/x/arch v0.13.0 // indirect | ||||
| 	golang.org/x/image v0.23.0 // indirect | ||||
| 	golang.org/x/net v0.34.0 // indirect | ||||
| 	golang.org/x/text v0.21.0 // indirect | ||||
| 	google.golang.org/protobuf v1.36.4 // indirect | ||||
| 	gopkg.in/yaml.v3 v3.0.1 // indirect | ||||
| 	modernc.org/libc v1.37.6 // indirect | ||||
| 	modernc.org/mathutil v1.6.0 // indirect | ||||
|   | ||||
							
								
								
									
										356
									
								
								go.sum
									
									
									
									
									
								
							
							
						
						
									
										356
									
								
								go.sum
									
									
									
									
									
								
							| @@ -1,48 +1,31 @@ | ||||
| filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= | ||||
| filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= | ||||
| github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= | ||||
| github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= | ||||
| github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= | ||||
| github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE= | ||||
| github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||
| github.com/bytedance/sonic v1.11.0 h1:FwNNv6Vu4z2Onf1++LNzxB/QhitD8wuTdpZzMTGITWo= | ||||
| github.com/bytedance/sonic v1.11.0/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||
| github.com/bytedance/sonic v1.11.1 h1:JC0+6c9FoWYYxakaoa+c5QTtJeiSZNeByOBhXtAFSn4= | ||||
| github.com/bytedance/sonic v1.11.1/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||
| github.com/bytedance/sonic v1.11.2 h1:ywfwo0a/3j9HR8wsYGWsIWl2mvRsI950HyoxiBERw5A= | ||||
| github.com/bytedance/sonic v1.11.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||
| github.com/bytedance/sonic v1.11.3 h1:jRN+yEjakWh8aK5FzrciUHG8OFXK+4/KrAX/ysEtHAA= | ||||
| github.com/bytedance/sonic v1.11.3/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= | ||||
| github.com/bytedance/sonic v1.11.4 h1:8+OMLSSDDm2/qJc6ld5K5Sm62NK9VHcUKk0NzBoMAM4= | ||||
| github.com/bytedance/sonic v1.11.4/go.mod h1:YrWEqYtlBPS6LUA0vpuG79a1trsh4Ae41uWUWUreHhE= | ||||
| github.com/bytedance/sonic v1.11.5 h1:G00FYjjqll5iQ1PYXynbg/hyzqBqavH8Mo9/oTopd9k= | ||||
| github.com/bytedance/sonic v1.11.5/go.mod h1:X2PC2giUdj/Cv2lliWFLk6c/DUQok5rViJSemeB0wDw= | ||||
| github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0= | ||||
| github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= | ||||
| github.com/bytedance/sonic v1.11.7 h1:k/l9p1hZpNIMJSk37wL9ltkcpqLfIho1vYthi4xT2t4= | ||||
| github.com/bytedance/sonic v1.11.7/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= | ||||
| github.com/bytedance/sonic v1.11.8 h1:Zw/j1KfiS+OYTi9lyB3bb0CFxPJVkM17k1wyDG32LRA= | ||||
| github.com/bytedance/sonic v1.11.8/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= | ||||
| github.com/bytedance/sonic v1.11.9 h1:LFHENlIY/SLzDWverzdOvgMztTxcfcF+cqNsz9pK5zg= | ||||
| github.com/bytedance/sonic v1.11.9/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= | ||||
| github.com/bytedance/sonic/loader v0.1.0/go.mod h1:UmRT+IRTGKz/DAkzcEGzyVqQFJ7H9BqwBO3pm9H/+HY= | ||||
| github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM= | ||||
| github.com/bytedance/sonic v1.12.3 h1:W2MGa7RCU1QTeYRTPE3+88mVC0yXmsRQRChiyVocVjU= | ||||
| github.com/bytedance/sonic v1.12.3/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk= | ||||
| github.com/bytedance/sonic v1.12.4 h1:9Csb3c9ZJhfUWeMtpCDCq6BUoH5ogfDFLUgQ/jG+R0k= | ||||
| github.com/bytedance/sonic v1.12.4/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk= | ||||
| github.com/bytedance/sonic v1.12.5 h1:hoZxY8uW+mT+OpkcUWw4k0fDINtOcVavEsGfzwzFU/w= | ||||
| github.com/bytedance/sonic v1.12.5/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk= | ||||
| github.com/bytedance/sonic v1.12.6 h1:/isNmCUF2x3Sh8RAp/4mh4ZGkcFAX/hLrzrK3AvpRzk= | ||||
| github.com/bytedance/sonic v1.12.6/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk= | ||||
| github.com/bytedance/sonic v1.12.7 h1:CQU8pxOy9HToxhndH0Kx/S1qU/CuS9GnKYrGioDcU1Q= | ||||
| github.com/bytedance/sonic v1.12.7/go.mod h1:tnbal4mxOMju17EGfknm2XyYcpyCnIROYOEYuemj13I= | ||||
| github.com/bytedance/sonic v1.12.8 h1:4xYRVRlXIgvSZ4e8iVTlMF5szgpXd4AfvuWgA8I8lgs= | ||||
| github.com/bytedance/sonic v1.12.8/go.mod h1:uVvFidNmlt9+wa31S1urfwwthTWteBgG0hWuoKAXTx8= | ||||
| github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= | ||||
| github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= | ||||
| github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= | ||||
| github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= | ||||
| github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA= | ||||
| github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | ||||
| github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0= | ||||
| github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= | ||||
| github.com/cloudwego/base64x v0.1.0 h1:Tg5q9tq1khq9Y9UwfoC6zkHK0FypN2GLDvhqFceOL8U= | ||||
| github.com/cloudwego/base64x v0.1.0/go.mod h1:lM8nFiNbg74QgesNo6EAtv8N9tlRjBWExmHoNDa3PkU= | ||||
| github.com/cloudwego/base64x v0.1.3 h1:b5J/l8xolB7dyDTTmhJP2oTs5LdrjyrUFuNxdfq5hAg= | ||||
| github.com/cloudwego/base64x v0.1.3/go.mod h1:1+1K5BUHIQzyapgpF7LwvOGAEDicKtt1umPV+aN8pi8= | ||||
| github.com/bytedance/sonic/loader v0.2.0 h1:zNprn+lsIP06C/IqCHs3gPQIvnvpKbbxyXQP1iU4kWM= | ||||
| github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= | ||||
| github.com/bytedance/sonic/loader v0.2.1 h1:1GgorWTqf12TA8mma4DDSbaQigE2wOgQo7iCjjJv3+E= | ||||
| github.com/bytedance/sonic/loader v0.2.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= | ||||
| github.com/bytedance/sonic/loader v0.2.2 h1:jxAJuN9fOot/cyz5Q6dUuMJF5OqQ6+5GfA8FjjQ0R4o= | ||||
| github.com/bytedance/sonic/loader v0.2.2/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= | ||||
| github.com/bytedance/sonic/loader v0.2.3 h1:yctD0Q3v2NOGfSWPLPvG2ggA2kV6TS6s4wioyEqssH0= | ||||
| github.com/bytedance/sonic/loader v0.2.3/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= | ||||
| github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y= | ||||
| github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= | ||||
| github.com/cloudwego/iasm v0.0.9/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= | ||||
| github.com/cloudwego/iasm v0.1.0 h1:q0OuhwWDMyi3nlrQ6kIr0Yx0c3FI6cq/OZWKodIDdz8= | ||||
| github.com/cloudwego/iasm v0.1.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= | ||||
| github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4= | ||||
| github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= | ||||
| github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg= | ||||
| github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= | ||||
| github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= | ||||
| @@ -53,14 +36,18 @@ github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1 | ||||
| github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4= | ||||
| github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= | ||||
| github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= | ||||
| github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= | ||||
| github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= | ||||
| github.com/gabriel-vasile/mimetype v1.4.4 h1:QjV6pZ7/XZ7ryI2KuyeEDE8wnh7fHP9YnQy+R0LnH8I= | ||||
| github.com/gabriel-vasile/mimetype v1.4.4/go.mod h1:JwLei5XPtWdGiMFB5Pjle1oEeoSeEuJfJE+TtfvdB/s= | ||||
| github.com/gabriel-vasile/mimetype v1.4.5 h1:J7wGKdGu33ocBOhGy0z653k/lFKLFDPJMG8Gql0kxn4= | ||||
| github.com/gabriel-vasile/mimetype v1.4.5/go.mod h1:ibHel+/kbxn9x2407k1izTA1S81ku1z/DlgOW2QE0M4= | ||||
| github.com/gabriel-vasile/mimetype v1.4.6 h1:3+PzJTKLkvgjeTbts6msPJt4DixhT4YtFNf1gtGe3zc= | ||||
| github.com/gabriel-vasile/mimetype v1.4.6/go.mod h1:JX1qVKqZd40hUPpAfiNTe0Sne7hdfKSbOqqmkq8GCXc= | ||||
| github.com/gabriel-vasile/mimetype v1.4.7 h1:SKFKl7kD0RiPdbht0s7hFtjl489WcQ1VyPW8ZzUMYCA= | ||||
| github.com/gabriel-vasile/mimetype v1.4.7/go.mod h1:GDlAgAyIRT27BhFl53XNAFtfjzOkLaF35JdEG0P7LtU= | ||||
| github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM= | ||||
| github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8= | ||||
| github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= | ||||
| github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= | ||||
| github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= | ||||
| github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= | ||||
| github.com/gin-contrib/sse v1.0.0 h1:y3bT1mUWUxDpW4JLQg/HnTqV4rozuW4tC9eFKTxYI9E= | ||||
| github.com/gin-contrib/sse v1.0.0/go.mod h1:zNuFdwarAygJBht0NTKiSi3jRf6RbqeILZ9Sp6Slhe0= | ||||
| github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU= | ||||
| github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= | ||||
| github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ= | ||||
| @@ -71,43 +58,30 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o | ||||
| github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= | ||||
| github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= | ||||
| github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= | ||||
| github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE= | ||||
| github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||
| github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74= | ||||
| github.com/go-playground/validator/v10 v10.17.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||
| github.com/go-playground/validator/v10 v10.18.0 h1:BvolUXjp4zuvkZ5YN5t7ebzbhlUtPsPm2S9NAZ5nl9U= | ||||
| github.com/go-playground/validator/v10 v10.18.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||
| github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4= | ||||
| github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||
| github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8= | ||||
| github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||
| github.com/go-playground/validator/v10 v10.21.0 h1:4fZA11ovvtkdgaeev9RGWPgc1uj3H8W+rNYyH/ySBb0= | ||||
| github.com/go-playground/validator/v10 v10.21.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||
| github.com/go-playground/validator/v10 v10.22.0 h1:k6HsTZ0sTnROkhS//R0O+55JgM8C4Bx7ia+JlgcnOao= | ||||
| github.com/go-playground/validator/v10 v10.22.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||
| github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | ||||
| github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | ||||
| github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA= | ||||
| github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||
| github.com/go-playground/validator/v10 v10.23.0 h1:/PwmTwZhS0dPkav3cdK9kV1FsAmrL8sThn8IHr/sO+o= | ||||
| github.com/go-playground/validator/v10 v10.23.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= | ||||
| github.com/go-playground/validator/v10 v10.24.0 h1:KHQckvo8G6hlWnrPX4NJJ+aBfWNAE/HH+qdL2cBpCmg= | ||||
| github.com/go-playground/validator/v10 v10.24.0/go.mod h1:GGzBIJMuE98Ic/kJsBXbz1x/7cByt++cQ+YOuDM5wus= | ||||
| github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= | ||||
| github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= | ||||
| github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | ||||
| github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= | ||||
| github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= | ||||
| github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= | ||||
| github.com/goccy/go-json v0.10.4 h1:JSwxQzIqKfmFX1swYPpUThQZp/Ka4wzJdK0LWVytLPM= | ||||
| github.com/goccy/go-json v0.10.4/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= | ||||
| github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= | ||||
| github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= | ||||
| github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= | ||||
| github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= | ||||
| github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | ||||
| github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= | ||||
| github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | ||||
| github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= | ||||
| github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= | ||||
| github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= | ||||
| github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= | ||||
| github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= | ||||
| github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= | ||||
| github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ= | ||||
| github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo= | ||||
| github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= | ||||
| github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= | ||||
| github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= | ||||
| github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= | ||||
| github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= | ||||
| github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= | ||||
| github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= | ||||
| @@ -115,58 +89,39 @@ github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHm | ||||
| github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= | ||||
| github.com/jung-kurt/gofpdf v1.16.2 h1:jgbatWHfRlPYiK85qgevsZTHviWXKwB1TTiKdz5PtRc= | ||||
| github.com/jung-kurt/gofpdf v1.16.2/go.mod h1:1hl7y57EsiPAkLbOwzpzqgx1A30nQCk/YmFV8S2vmK0= | ||||
| github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= | ||||
| github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= | ||||
| github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | ||||
| github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI= | ||||
| github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | ||||
| github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= | ||||
| github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= | ||||
| github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU= | ||||
| github.com/klauspost/compress v1.17.8/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= | ||||
| github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA= | ||||
| github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= | ||||
| github.com/klauspost/compress v1.17.10 h1:oXAz+Vh0PMUvJczoi+flxpnBEPxoER1IaAnU/NMPtT0= | ||||
| github.com/klauspost/compress v1.17.10/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= | ||||
| github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= | ||||
| github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= | ||||
| github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | ||||
| github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= | ||||
| github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||
| github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM= | ||||
| github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||
| github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM= | ||||
| github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||
| github.com/klauspost/cpuid/v2 v2.2.9 h1:66ze0taIn2H33fBvCkXuv9BmCwDfafmiIVpKV9kKGuY= | ||||
| github.com/klauspost/cpuid/v2 v2.2.9/go.mod h1:rqkxqrZ1EhYM9G+hXH7YdowN5R5RGN6NK4QwQ3WMXF8= | ||||
| github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= | ||||
| github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= | ||||
| github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= | ||||
| github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= | ||||
| github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= | ||||
| github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0= | ||||
| github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= | ||||
| github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= | ||||
| github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= | ||||
| github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= | ||||
| github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= | ||||
| github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= | ||||
| github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= | ||||
| github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= | ||||
| github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= | ||||
| github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= | ||||
| github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= | ||||
| github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= | ||||
| github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI= | ||||
| github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= | ||||
| github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= | ||||
| github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= | ||||
| github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | ||||
| github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= | ||||
| github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= | ||||
| github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= | ||||
| github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= | ||||
| github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= | ||||
| github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= | ||||
| github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= | ||||
| github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI= | ||||
| github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= | ||||
| github.com/pelletier/go-toml/v2 v2.2.0 h1:QLgLl2yMN7N+ruc31VynXs1vhMZa7CeHHejIeBAsoHo= | ||||
| github.com/pelletier/go-toml/v2 v2.2.0/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= | ||||
| github.com/pelletier/go-toml/v2 v2.2.1 h1:9TA9+T8+8CUCO2+WYnDLCgrYi9+omqKXyjDtosvtEhg= | ||||
| github.com/pelletier/go-toml/v2 v2.2.1/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= | ||||
| github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM= | ||||
| github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= | ||||
| github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= | ||||
| github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= | ||||
| github.com/phpdave11/gofpdi v1.0.7/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= | ||||
| github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= | ||||
| github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= | ||||
| @@ -174,12 +129,9 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb | ||||
| github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= | ||||
| github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= | ||||
| github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= | ||||
| github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= | ||||
| github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= | ||||
| github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A= | ||||
| github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= | ||||
| github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0= | ||||
| github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= | ||||
| github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU= | ||||
| github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= | ||||
| github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8= | ||||
| github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= | ||||
| github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= | ||||
| @@ -193,11 +145,10 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ | ||||
| github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= | ||||
| github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= | ||||
| github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= | ||||
| github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= | ||||
| github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= | ||||
| github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= | ||||
| github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= | ||||
| github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= | ||||
| github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= | ||||
| github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= | ||||
| github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= | ||||
| github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= | ||||
| @@ -208,86 +159,63 @@ github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= | ||||
| github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= | ||||
| github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= | ||||
| github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= | ||||
| github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= | ||||
| github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk= | ||||
| github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4= | ||||
| github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76 h1:tBiBTKHnIjovYoLX/TPkcf+OjqqKGQrPtGT3Foz+Pgo= | ||||
| github.com/youmark/pkcs8 v0.0.0-20240424034433-3c2c7870ae76/go.mod h1:SQliXeA7Dhkt//vS29v3zpbEwoa+zb2Cn5xj5uO4K5U= | ||||
| github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM= | ||||
| github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI= | ||||
| github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | ||||
| go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk= | ||||
| go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo= | ||||
| go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80= | ||||
| go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= | ||||
| go.mongodb.org/mongo-driver v1.15.0 h1:rJCKC8eEliewXjZGf0ddURtl7tTVy1TK3bfl0gkUSLc= | ||||
| go.mongodb.org/mongo-driver v1.15.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= | ||||
| go.mongodb.org/mongo-driver v1.15.1 h1:l+RvoUOoMXFmADTLfYDm7On9dRm7p4T80/lEQM+r7HU= | ||||
| go.mongodb.org/mongo-driver v1.15.1/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= | ||||
| go.mongodb.org/mongo-driver v1.16.0 h1:tpRsfBJMROVHKpdGyc1BBEzzjDUWjItxbVSZ8Ls4BQ4= | ||||
| go.mongodb.org/mongo-driver v1.16.0/go.mod h1:oB6AhJQvFQL4LEHyXi6aJzQJtBiTQHiAd83l0GdFaiw= | ||||
| golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= | ||||
| golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc= | ||||
| golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||
| golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc= | ||||
| golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||
| go.mongodb.org/mongo-driver v1.17.1 h1:Wic5cJIwJgSpBhe3lx3+/RybR5PiYRMpVFgO7cOHyIM= | ||||
| go.mongodb.org/mongo-driver v1.17.1/go.mod h1:wwWm/+BuOddhcq3n68LKRmgk2wXzmF6s0SFOa0GINL4= | ||||
| go.mongodb.org/mongo-driver v1.17.2 h1:gvZyk8352qSfzyZ2UMWcpDpMSGEr1eqE4T793SqyhzM= | ||||
| go.mongodb.org/mongo-driver v1.17.2/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= | ||||
| golang.org/x/arch v0.11.0 h1:KXV8WWKCXm6tRpLirl2szsO5j/oOODwZf4hATmGVNs4= | ||||
| golang.org/x/arch v0.11.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||
| golang.org/x/arch v0.12.0 h1:UsYJhbzPYGsT0HbEdmYcqtCv8UNGvnaL561NnIUvaKg= | ||||
| golang.org/x/arch v0.12.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||
| golang.org/x/arch v0.13.0 h1:KCkqVVV1kGg0X87TFysjCJ8MxtZEIU4Ja/yXGeoECdA= | ||||
| golang.org/x/arch v0.13.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= | ||||
| golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | ||||
| golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | ||||
| golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= | ||||
| golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= | ||||
| golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= | ||||
| golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= | ||||
| golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= | ||||
| golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= | ||||
| golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= | ||||
| golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= | ||||
| golang.org/x/crypto v0.20.0 h1:jmAMJJZXr5KiCw05dfYK9QnqaqKLYXijU23lsEdcQqg= | ||||
| golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ= | ||||
| golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= | ||||
| golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= | ||||
| golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= | ||||
| golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= | ||||
| golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI= | ||||
| golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= | ||||
| golang.org/x/crypto v0.24.0 h1:mnl8DM0o513X8fdIkmyFE/5hTYxbwYOjDS/+rK6qpRI= | ||||
| golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= | ||||
| golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= | ||||
| golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= | ||||
| golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= | ||||
| golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= | ||||
| golang.org/x/crypto v0.30.0 h1:RwoQn3GkWiMkzlX562cLB7OxWvjH1L8xutO2WoJcRoY= | ||||
| golang.org/x/crypto v0.30.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= | ||||
| golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U= | ||||
| golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= | ||||
| golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= | ||||
| golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= | ||||
| golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= | ||||
| golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3NCfkPxbDKRdnNE1Rpg0U= | ||||
| golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= | ||||
| golang.org/x/image v0.16.0 h1:9kloLAKhUufZhA12l5fwnx2NZW39/we1UhBesW433jw= | ||||
| golang.org/x/image v0.16.0/go.mod h1:ugSZItdV4nOxyqp56HmXwH0Ry0nBCpjnZdpDaIHdoPs= | ||||
| golang.org/x/image v0.17.0 h1:nTRVVdajgB8zCMZVsViyzhnMKPwYeroEERRC64JuLco= | ||||
| golang.org/x/image v0.17.0/go.mod h1:4yyo5vMFQjVjUcVk4jEQcU9MGy/rulF5WvUILseCM2E= | ||||
| golang.org/x/image v0.18.0 h1:jGzIakQa/ZXI1I0Fxvaa9W7yP25TqT6cHIHn+6CqvSQ= | ||||
| golang.org/x/image v0.18.0/go.mod h1:4yyo5vMFQjVjUcVk4jEQcU9MGy/rulF5WvUILseCM2E= | ||||
| golang.org/x/image v0.21.0 h1:c5qV36ajHpdj4Qi0GnE0jUc/yuo33OLFaa0d+crTD5s= | ||||
| golang.org/x/image v0.21.0/go.mod h1:vUbsLavqK/W303ZroQQVKQ+Af3Yl6Uz1Ppu5J/cLz78= | ||||
| golang.org/x/image v0.22.0 h1:UtK5yLUzilVrkjMAZAZ34DXGpASN8i8pj8g+O+yd10g= | ||||
| golang.org/x/image v0.22.0/go.mod h1:9hPFhljd4zZ1GNSIZJ49sqbp45GKK9t6w+iXvGqZUz4= | ||||
| golang.org/x/image v0.23.0 h1:HseQ7c2OpPKTPVzNjG5fwJsOTCiiwS4QdsYi5XU6H68= | ||||
| golang.org/x/image v0.23.0/go.mod h1:wJJBTdLfCCf3tiHa1fNxpZmUI4mmoZvwMCPP0ddoNKY= | ||||
| golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | ||||
| golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | ||||
| golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | ||||
| golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= | ||||
| golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= | ||||
| golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= | ||||
| golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= | ||||
| golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= | ||||
| golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= | ||||
| golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= | ||||
| golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= | ||||
| golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= | ||||
| golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc= | ||||
| golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= | ||||
| golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w= | ||||
| golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8= | ||||
| golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= | ||||
| golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= | ||||
| golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ= | ||||
| golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= | ||||
| golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4= | ||||
| golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= | ||||
| golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= | ||||
| golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= | ||||
| golang.org/x/net v0.32.0 h1:ZqPmj8Kzc+Y6e0+skZsuACbx+wzMgo5MQsJh9Qd6aYI= | ||||
| golang.org/x/net v0.32.0/go.mod h1:CwU0IoeOlnQQWJ6ioyFrfRuomB8GKF6KbYXZVyeXNfs= | ||||
| golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I= | ||||
| golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= | ||||
| golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= | ||||
| golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= | ||||
| golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||
| golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||
| golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= | ||||
| golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | ||||
| golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= | ||||
| golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | ||||
| golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= | ||||
| golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | ||||
| golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= | ||||
| golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | ||||
| golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= | ||||
| golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= | ||||
| golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | ||||
| golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||
| golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||
| golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | ||||
| golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||
| golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||
| golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||
| @@ -295,61 +223,50 @@ golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBc | ||||
| golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||
| golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||
| golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | ||||
| golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= | ||||
| golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= | ||||
| golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= | ||||
| golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= | ||||
| golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y= | ||||
| golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws= | ||||
| golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= | ||||
| golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= | ||||
| golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= | ||||
| golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU= | ||||
| golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= | ||||
| golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | ||||
| golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= | ||||
| golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE= | ||||
| golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= | ||||
| golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= | ||||
| golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= | ||||
| golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8= | ||||
| golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= | ||||
| golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q= | ||||
| golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= | ||||
| golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw= | ||||
| golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= | ||||
| golang.org/x/term v0.21.0 h1:WVXCp+/EBEHOj53Rvu+7KiT/iElMrO8ACK16SMZ3jaA= | ||||
| golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0= | ||||
| golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24= | ||||
| golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M= | ||||
| golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU= | ||||
| golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= | ||||
| golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q= | ||||
| golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= | ||||
| golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg= | ||||
| golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= | ||||
| golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | ||||
| golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||
| golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | ||||
| golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= | ||||
| golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= | ||||
| golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= | ||||
| golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= | ||||
| golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= | ||||
| golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk= | ||||
| golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= | ||||
| golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= | ||||
| golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= | ||||
| golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= | ||||
| golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= | ||||
| golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= | ||||
| golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= | ||||
| golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= | ||||
| golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= | ||||
| golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= | ||||
| golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | ||||
| golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= | ||||
| golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | ||||
| golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | ||||
| golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU= | ||||
| golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= | ||||
| google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= | ||||
| google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||
| google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= | ||||
| google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||
| google.golang.org/protobuf v1.34.0 h1:Qo/qEd2RZPCf2nKuorzksSknv0d3ERwp1vFG38gSmH4= | ||||
| google.golang.org/protobuf v1.34.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||
| google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg= | ||||
| google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= | ||||
| google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= | ||||
| google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= | ||||
| google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA= | ||||
| google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= | ||||
| google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io= | ||||
| google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= | ||||
| google.golang.org/protobuf v1.36.1 h1:yBPeRvTftaleIgM3PZ/WBIZ7XM/eEYAaEyCwvyjq/gk= | ||||
| google.golang.org/protobuf v1.36.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= | ||||
| google.golang.org/protobuf v1.36.2 h1:R8FeyR1/eLmkutZOM5CWghmo5itiG9z0ktFlTVLuTmU= | ||||
| google.golang.org/protobuf v1.36.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= | ||||
| google.golang.org/protobuf v1.36.4 h1:6A3ZDJHn/eNqc1i+IdefRzy/9PokBTPvcqMySR7NNIM= | ||||
| google.golang.org/protobuf v1.36.4/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= | ||||
| gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= | ||||
| gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | ||||
| gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | ||||
| @@ -364,4 +281,3 @@ modernc.org/memory v1.7.2/go.mod h1:NO4NVCQy0N7ln+T9ngWqOQfi7ley4vpwvARR+Hjw95E= | ||||
| modernc.org/sqlite v1.28.0 h1:Zx+LyDDmXczNnEQdvPuEfcFVA2ZPyaD7UCZDjef3BHQ= | ||||
| modernc.org/sqlite v1.28.0/go.mod h1:Qxpazz0zH8Z1xCFyi5GSL3FzbtZ3fvbjmywNogldEW0= | ||||
| nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= | ||||
| rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| package goext | ||||
|  | ||||
| const GoextVersion = "0.0.480" | ||||
| const GoextVersion = "0.0.563" | ||||
|  | ||||
| const GoextVersionTimestamp = "2024-07-02T11:32:22+0200" | ||||
| const GoextVersionTimestamp = "2025-01-31T21:16:42+0100" | ||||
|   | ||||
| @@ -1,27 +0,0 @@ | ||||
| Copyright (c) 2009 The Go Authors. All rights reserved. | ||||
|  | ||||
| Redistribution and use in source and binary forms, with or without | ||||
| modification, are permitted provided that the following conditions are | ||||
| met: | ||||
|  | ||||
|    * Redistributions of source code must retain the above copyright | ||||
| notice, this list of conditions and the following disclaimer. | ||||
|    * Redistributions in binary form must reproduce the above | ||||
| copyright notice, this list of conditions and the following disclaimer | ||||
| in the documentation and/or other materials provided with the | ||||
| distribution. | ||||
|    * Neither the name of Google Inc. nor the names of its | ||||
| contributors may be used to endorse or promote products derived from | ||||
| this software without specific prior written permission. | ||||
|  | ||||
| THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||
| "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||
| LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||
| A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||
| OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||
| SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||
| LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||
| DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||
| THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||
| (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||
| OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
| @@ -4,9 +4,12 @@ JSON serializer which serializes nil-Arrays as `[]` and nil-maps als `{}`. | ||||
|  | ||||
| Idea from: https://github.com/homelight/json | ||||
|  | ||||
| Forked from https://github.com/golang/go/tree/547e8e22fe565d65d1fd4d6e71436a5a855447b0/src/encoding/json ( tag go1.20.2 ) | ||||
| Forked from https://github.com/golang/go/tree/194de8fbfaf4c3ed54e1a3c1b14fc67a830b8d95/src/encoding/json ( tag go1.23.4 ) | ||||
|   -> https://github.com/golang/go/tree/go1.23.4/src/encoding/json | ||||
|  | ||||
| Added: | ||||
|  | ||||
|  - `MarshalSafeCollections()` method | ||||
|  - `Encoder.nilSafeSlices` and `Encoder.nilSafeMaps` fields | ||||
|  - `Add 'tagkey' to use different key than json (set on Decoder struct)` | ||||
|  - `Add 'jsonfilter' to filter printed fields (set via MarshalSafeCollections)` | ||||
							
								
								
									
										141
									
								
								gojson/decode.go
									
									
									
									
									
								
							
							
						
						
									
										141
									
								
								gojson/decode.go
									
									
									
									
									
								
							| @@ -17,14 +17,15 @@ import ( | ||||
| 	"unicode" | ||||
| 	"unicode/utf16" | ||||
| 	"unicode/utf8" | ||||
| 	_ "unsafe" // for linkname | ||||
| ) | ||||
|  | ||||
| // Unmarshal parses the JSON-encoded data and stores the result | ||||
| // in the value pointed to by v. If v is nil or not a pointer, | ||||
| // Unmarshal returns an InvalidUnmarshalError. | ||||
| // Unmarshal returns an [InvalidUnmarshalError]. | ||||
| // | ||||
| // Unmarshal uses the inverse of the encodings that | ||||
| // Marshal uses, allocating maps, slices, and pointers as necessary, | ||||
| // [Marshal] uses, allocating maps, slices, and pointers as necessary, | ||||
| // with the following additional rules: | ||||
| // | ||||
| // To unmarshal JSON into a pointer, Unmarshal first handles the case of | ||||
| @@ -33,28 +34,28 @@ import ( | ||||
| // the value pointed at by the pointer. If the pointer is nil, Unmarshal | ||||
| // allocates a new value for it to point to. | ||||
| // | ||||
| // To unmarshal JSON into a value implementing the Unmarshaler interface, | ||||
| // Unmarshal calls that value's UnmarshalJSON method, including | ||||
| // To unmarshal JSON into a value implementing [Unmarshaler], | ||||
| // Unmarshal calls that value's [Unmarshaler.UnmarshalJSON] method, including | ||||
| // when the input is a JSON null. | ||||
| // Otherwise, if the value implements encoding.TextUnmarshaler | ||||
| // and the input is a JSON quoted string, Unmarshal calls that value's | ||||
| // UnmarshalText method with the unquoted form of the string. | ||||
| // Otherwise, if the value implements [encoding.TextUnmarshaler] | ||||
| // and the input is a JSON quoted string, Unmarshal calls | ||||
| // [encoding.TextUnmarshaler.UnmarshalText] with the unquoted form of the string. | ||||
| // | ||||
| // To unmarshal JSON into a struct, Unmarshal matches incoming object | ||||
| // keys to the keys used by Marshal (either the struct field name or its tag), | ||||
| // keys to the keys used by [Marshal] (either the struct field name or its tag), | ||||
| // preferring an exact match but also accepting a case-insensitive match. By | ||||
| // default, object keys which don't have a corresponding struct field are | ||||
| // ignored (see Decoder.DisallowUnknownFields for an alternative). | ||||
| // ignored (see [Decoder.DisallowUnknownFields] for an alternative). | ||||
| // | ||||
| // To unmarshal JSON into an interface value, | ||||
| // Unmarshal stores one of these in the interface value: | ||||
| // | ||||
| //	bool, for JSON booleans | ||||
| //	float64, for JSON numbers | ||||
| //	string, for JSON strings | ||||
| //	[]interface{}, for JSON arrays | ||||
| //	map[string]interface{}, for JSON objects | ||||
| //	nil for JSON null | ||||
| //   - bool, for JSON booleans | ||||
| //   - float64, for JSON numbers | ||||
| //   - string, for JSON strings | ||||
| //   - []interface{}, for JSON arrays | ||||
| //   - map[string]interface{}, for JSON objects | ||||
| //   - nil for JSON null | ||||
| // | ||||
| // To unmarshal a JSON array into a slice, Unmarshal resets the slice length | ||||
| // to zero and then appends each element to the slice. | ||||
| @@ -72,16 +73,15 @@ import ( | ||||
| // use. If the map is nil, Unmarshal allocates a new map. Otherwise Unmarshal | ||||
| // reuses the existing map, keeping existing entries. Unmarshal then stores | ||||
| // key-value pairs from the JSON object into the map. The map's key type must | ||||
| // either be any string type, an integer, implement json.Unmarshaler, or | ||||
| // implement encoding.TextUnmarshaler. | ||||
| // either be any string type, an integer, or implement [encoding.TextUnmarshaler]. | ||||
| // | ||||
| // If the JSON-encoded data contain a syntax error, Unmarshal returns a SyntaxError. | ||||
| // If the JSON-encoded data contain a syntax error, Unmarshal returns a [SyntaxError]. | ||||
| // | ||||
| // If a JSON value is not appropriate for a given target type, | ||||
| // or if a JSON number overflows the target type, Unmarshal | ||||
| // skips that field and completes the unmarshaling as best it can. | ||||
| // If no more serious errors are encountered, Unmarshal returns | ||||
| // an UnmarshalTypeError describing the earliest such error. In any | ||||
| // an [UnmarshalTypeError] describing the earliest such error. In any | ||||
| // case, it's not guaranteed that all the remaining fields following | ||||
| // the problematic one will be unmarshaled into the target object. | ||||
| // | ||||
| @@ -114,7 +114,7 @@ func Unmarshal(data []byte, v any) error { | ||||
| // a JSON value. UnmarshalJSON must copy the JSON data | ||||
| // if it wishes to retain the data after returning. | ||||
| // | ||||
| // By convention, to approximate the behavior of Unmarshal itself, | ||||
| // By convention, to approximate the behavior of [Unmarshal] itself, | ||||
| // Unmarshalers implement UnmarshalJSON([]byte("null")) as a no-op. | ||||
| type Unmarshaler interface { | ||||
| 	UnmarshalJSON([]byte) error | ||||
| @@ -151,8 +151,8 @@ func (e *UnmarshalFieldError) Error() string { | ||||
| 	return "json: cannot unmarshal object key " + strconv.Quote(e.Key) + " into unexported field " + e.Field.Name + " of type " + e.Type.String() | ||||
| } | ||||
|  | ||||
| // An InvalidUnmarshalError describes an invalid argument passed to Unmarshal. | ||||
| // (The argument to Unmarshal must be a non-nil pointer.) | ||||
| // An InvalidUnmarshalError describes an invalid argument passed to [Unmarshal]. | ||||
| // (The argument to [Unmarshal] must be a non-nil pointer.) | ||||
| type InvalidUnmarshalError struct { | ||||
| 	Type reflect.Type | ||||
| } | ||||
| @@ -541,17 +541,10 @@ func (d *decodeState) array(v reflect.Value) error { | ||||
| 			break | ||||
| 		} | ||||
|  | ||||
| 		// Get element of array, growing if necessary. | ||||
| 		// Expand slice length, growing the slice if necessary. | ||||
| 		if v.Kind() == reflect.Slice { | ||||
| 			// Grow slice if necessary | ||||
| 			if i >= v.Cap() { | ||||
| 				newcap := v.Cap() + v.Cap()/2 | ||||
| 				if newcap < 4 { | ||||
| 					newcap = 4 | ||||
| 				} | ||||
| 				newv := reflect.MakeSlice(v.Type(), v.Len(), newcap) | ||||
| 				reflect.Copy(newv, v) | ||||
| 				v.Set(newv) | ||||
| 				v.Grow(1) | ||||
| 			} | ||||
| 			if i >= v.Len() { | ||||
| 				v.SetLen(i + 1) | ||||
| @@ -585,13 +578,11 @@ func (d *decodeState) array(v reflect.Value) error { | ||||
|  | ||||
| 	if i < v.Len() { | ||||
| 		if v.Kind() == reflect.Array { | ||||
| 			// Array. Zero the rest. | ||||
| 			z := reflect.Zero(v.Type().Elem()) | ||||
| 			for ; i < v.Len(); i++ { | ||||
| 				v.Index(i).Set(z) | ||||
| 				v.Index(i).SetZero() // zero remainder of array | ||||
| 			} | ||||
| 		} else { | ||||
| 			v.SetLen(i) | ||||
| 			v.SetLen(i) // truncate the slice | ||||
| 		} | ||||
| 	} | ||||
| 	if i == 0 && v.Kind() == reflect.Slice { | ||||
| @@ -601,7 +592,7 @@ func (d *decodeState) array(v reflect.Value) error { | ||||
| } | ||||
|  | ||||
| var nullLiteral = []byte("null") | ||||
| var textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem() | ||||
| var textUnmarshalerType = reflect.TypeFor[encoding.TextUnmarshaler]() | ||||
|  | ||||
| // object consumes an object from d.data[d.off-1:], decoding into v. | ||||
| // The first byte ('{') of the object has been read already. | ||||
| @@ -700,24 +691,13 @@ func (d *decodeState) object(v reflect.Value) error { | ||||
| 			if !mapElem.IsValid() { | ||||
| 				mapElem = reflect.New(elemType).Elem() | ||||
| 			} else { | ||||
| 				mapElem.Set(reflect.Zero(elemType)) | ||||
| 				mapElem.SetZero() | ||||
| 			} | ||||
| 			subv = mapElem | ||||
| 		} else { | ||||
| 			var f *field | ||||
| 			if i, ok := fields.nameIndex[string(key)]; ok { | ||||
| 				// Found an exact name match. | ||||
| 				f = &fields.list[i] | ||||
| 			} else { | ||||
| 				// Fall back to the expensive case-insensitive | ||||
| 				// linear search. | ||||
| 				for i := range fields.list { | ||||
| 					ff := &fields.list[i] | ||||
| 					if ff.equalFold(ff.nameBytes, key) { | ||||
| 						f = ff | ||||
| 						break | ||||
| 					} | ||||
| 				} | ||||
| 			f := fields.byExactName[string(key)] | ||||
| 			if f == nil { | ||||
| 				f = fields.byFoldedName[string(foldName(key))] | ||||
| 			} | ||||
| 			if f != nil { | ||||
| 				subv = v | ||||
| @@ -787,33 +767,35 @@ func (d *decodeState) object(v reflect.Value) error { | ||||
| 		if v.Kind() == reflect.Map { | ||||
| 			kt := t.Key() | ||||
| 			var kv reflect.Value | ||||
| 			switch { | ||||
| 			case reflect.PointerTo(kt).Implements(textUnmarshalerType): | ||||
| 			if reflect.PointerTo(kt).Implements(textUnmarshalerType) { | ||||
| 				kv = reflect.New(kt) | ||||
| 				if err := d.literalStore(item, kv, true); err != nil { | ||||
| 					return err | ||||
| 				} | ||||
| 				kv = kv.Elem() | ||||
| 			case kt.Kind() == reflect.String: | ||||
| 				kv = reflect.ValueOf(key).Convert(kt) | ||||
| 			default: | ||||
| 			} else { | ||||
| 				switch kt.Kind() { | ||||
| 				case reflect.String: | ||||
| 					kv = reflect.New(kt).Elem() | ||||
| 					kv.SetString(string(key)) | ||||
| 				case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: | ||||
| 					s := string(key) | ||||
| 					n, err := strconv.ParseInt(s, 10, 64) | ||||
| 					if err != nil || reflect.Zero(kt).OverflowInt(n) { | ||||
| 					if err != nil || kt.OverflowInt(n) { | ||||
| 						d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: kt, Offset: int64(start + 1)}) | ||||
| 						break | ||||
| 					} | ||||
| 					kv = reflect.ValueOf(n).Convert(kt) | ||||
| 					kv = reflect.New(kt).Elem() | ||||
| 					kv.SetInt(n) | ||||
| 				case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: | ||||
| 					s := string(key) | ||||
| 					n, err := strconv.ParseUint(s, 10, 64) | ||||
| 					if err != nil || reflect.Zero(kt).OverflowUint(n) { | ||||
| 					if err != nil || kt.OverflowUint(n) { | ||||
| 						d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: kt, Offset: int64(start + 1)}) | ||||
| 						break | ||||
| 					} | ||||
| 					kv = reflect.ValueOf(n).Convert(kt) | ||||
| 					kv = reflect.New(kt).Elem() | ||||
| 					kv.SetUint(n) | ||||
| 				default: | ||||
| 					panic("json: Unexpected key type") // should never occur | ||||
| 				} | ||||
| @@ -852,12 +834,12 @@ func (d *decodeState) convertNumber(s string) (any, error) { | ||||
| 	} | ||||
| 	f, err := strconv.ParseFloat(s, 64) | ||||
| 	if err != nil { | ||||
| 		return nil, &UnmarshalTypeError{Value: "number " + s, Type: reflect.TypeOf(0.0), Offset: int64(d.off)} | ||||
| 		return nil, &UnmarshalTypeError{Value: "number " + s, Type: reflect.TypeFor[float64](), Offset: int64(d.off)} | ||||
| 	} | ||||
| 	return f, nil | ||||
| } | ||||
|  | ||||
| var numberType = reflect.TypeOf(Number("")) | ||||
| var numberType = reflect.TypeFor[Number]() | ||||
|  | ||||
| // literalStore decodes a literal stored in item into v. | ||||
| // | ||||
| @@ -867,7 +849,7 @@ var numberType = reflect.TypeOf(Number("")) | ||||
| func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool) error { | ||||
| 	// Check for unmarshaler. | ||||
| 	if len(item) == 0 { | ||||
| 		//Empty string given | ||||
| 		// Empty string given. | ||||
| 		d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) | ||||
| 		return nil | ||||
| 	} | ||||
| @@ -914,7 +896,7 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool | ||||
| 		} | ||||
| 		switch v.Kind() { | ||||
| 		case reflect.Interface, reflect.Pointer, reflect.Map, reflect.Slice: | ||||
| 			v.Set(reflect.Zero(v.Type())) | ||||
| 			v.SetZero() | ||||
| 			// otherwise, ignore null for primitives/string | ||||
| 		} | ||||
| 	case 't', 'f': // true, false | ||||
| @@ -966,10 +948,11 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool | ||||
| 			} | ||||
| 			v.SetBytes(b[:n]) | ||||
| 		case reflect.String: | ||||
| 			if v.Type() == numberType && !isValidNumber(string(s)) { | ||||
| 			t := string(s) | ||||
| 			if v.Type() == numberType && !isValidNumber(t) { | ||||
| 				return fmt.Errorf("json: invalid number literal, trying to unmarshal %q into Number", item) | ||||
| 			} | ||||
| 			v.SetString(string(s)) | ||||
| 			v.SetString(t) | ||||
| 		case reflect.Interface: | ||||
| 			if v.NumMethod() == 0 { | ||||
| 				v.Set(reflect.ValueOf(string(s))) | ||||
| @@ -985,13 +968,12 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool | ||||
| 			} | ||||
| 			panic(phasePanicMsg) | ||||
| 		} | ||||
| 		s := string(item) | ||||
| 		switch v.Kind() { | ||||
| 		default: | ||||
| 			if v.Kind() == reflect.String && v.Type() == numberType { | ||||
| 				// s must be a valid number, because it's | ||||
| 				// already been tokenized. | ||||
| 				v.SetString(s) | ||||
| 				v.SetString(string(item)) | ||||
| 				break | ||||
| 			} | ||||
| 			if fromQuoted { | ||||
| @@ -999,7 +981,7 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool | ||||
| 			} | ||||
| 			d.saveError(&UnmarshalTypeError{Value: "number", Type: v.Type(), Offset: int64(d.readIndex())}) | ||||
| 		case reflect.Interface: | ||||
| 			n, err := d.convertNumber(s) | ||||
| 			n, err := d.convertNumber(string(item)) | ||||
| 			if err != nil { | ||||
| 				d.saveError(err) | ||||
| 				break | ||||
| @@ -1011,25 +993,25 @@ func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool | ||||
| 			v.Set(reflect.ValueOf(n)) | ||||
|  | ||||
| 		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: | ||||
| 			n, err := strconv.ParseInt(s, 10, 64) | ||||
| 			n, err := strconv.ParseInt(string(item), 10, 64) | ||||
| 			if err != nil || v.OverflowInt(n) { | ||||
| 				d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: v.Type(), Offset: int64(d.readIndex())}) | ||||
| 				d.saveError(&UnmarshalTypeError{Value: "number " + string(item), Type: v.Type(), Offset: int64(d.readIndex())}) | ||||
| 				break | ||||
| 			} | ||||
| 			v.SetInt(n) | ||||
|  | ||||
| 		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: | ||||
| 			n, err := strconv.ParseUint(s, 10, 64) | ||||
| 			n, err := strconv.ParseUint(string(item), 10, 64) | ||||
| 			if err != nil || v.OverflowUint(n) { | ||||
| 				d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: v.Type(), Offset: int64(d.readIndex())}) | ||||
| 				d.saveError(&UnmarshalTypeError{Value: "number " + string(item), Type: v.Type(), Offset: int64(d.readIndex())}) | ||||
| 				break | ||||
| 			} | ||||
| 			v.SetUint(n) | ||||
|  | ||||
| 		case reflect.Float32, reflect.Float64: | ||||
| 			n, err := strconv.ParseFloat(s, v.Type().Bits()) | ||||
| 			n, err := strconv.ParseFloat(string(item), v.Type().Bits()) | ||||
| 			if err != nil || v.OverflowFloat(n) { | ||||
| 				d.saveError(&UnmarshalTypeError{Value: "number " + s, Type: v.Type(), Offset: int64(d.readIndex())}) | ||||
| 				d.saveError(&UnmarshalTypeError{Value: "number " + string(item), Type: v.Type(), Offset: int64(d.readIndex())}) | ||||
| 				break | ||||
| 			} | ||||
| 			v.SetFloat(n) | ||||
| @@ -1201,6 +1183,15 @@ func unquote(s []byte) (t string, ok bool) { | ||||
| 	return | ||||
| } | ||||
|  | ||||
| // unquoteBytes should be an internal detail, | ||||
| // but widely used packages access it using linkname. | ||||
| // Notable members of the hall of shame include: | ||||
| //   - github.com/bytedance/sonic | ||||
| // | ||||
| // Do not remove or change the type signature. | ||||
| // See go.dev/issue/67401. | ||||
| // | ||||
| //go:linkname unquoteBytes | ||||
| func unquoteBytes(s []byte) (t []byte, ok bool) { | ||||
| 	if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' { | ||||
| 		return | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										572
									
								
								gojson/encode.go
									
									
									
									
									
								
							
							
						
						
									
										572
									
								
								gojson/encode.go
									
									
									
									
									
								
							| @@ -12,45 +12,48 @@ package json | ||||
|  | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"cmp" | ||||
| 	"encoding" | ||||
| 	"encoding/base64" | ||||
| 	"fmt" | ||||
| 	"math" | ||||
| 	"reflect" | ||||
| 	"sort" | ||||
| 	"slices" | ||||
| 	"strconv" | ||||
| 	"strings" | ||||
| 	"sync" | ||||
| 	"unicode" | ||||
| 	"unicode/utf8" | ||||
| 	_ "unsafe" // for linkname | ||||
| ) | ||||
|  | ||||
| // Marshal returns the JSON encoding of v. | ||||
| // | ||||
| // Marshal traverses the value v recursively. | ||||
| // If an encountered value implements the Marshaler interface | ||||
| // and is not a nil pointer, Marshal calls its MarshalJSON method | ||||
| // to produce JSON. If no MarshalJSON method is present but the | ||||
| // value implements encoding.TextMarshaler instead, Marshal calls | ||||
| // its MarshalText method and encodes the result as a JSON string. | ||||
| // If an encountered value implements [Marshaler] | ||||
| // and is not a nil pointer, Marshal calls [Marshaler.MarshalJSON] | ||||
| // to produce JSON. If no [Marshaler.MarshalJSON] method is present but the | ||||
| // value implements [encoding.TextMarshaler] instead, Marshal calls | ||||
| // [encoding.TextMarshaler.MarshalText] and encodes the result as a JSON string. | ||||
| // The nil pointer exception is not strictly necessary | ||||
| // but mimics a similar, necessary exception in the behavior of | ||||
| // UnmarshalJSON. | ||||
| // [Unmarshaler.UnmarshalJSON]. | ||||
| // | ||||
| // Otherwise, Marshal uses the following type-dependent default encodings: | ||||
| // | ||||
| // Boolean values encode as JSON booleans. | ||||
| // | ||||
| // Floating point, integer, and Number values encode as JSON numbers. | ||||
| // Floating point, integer, and [Number] values encode as JSON numbers. | ||||
| // NaN and +/-Inf values will return an [UnsupportedValueError]. | ||||
| // | ||||
| // String values encode as JSON strings coerced to valid UTF-8, | ||||
| // replacing invalid bytes with the Unicode replacement rune. | ||||
| // So that the JSON will be safe to embed inside HTML <script> tags, | ||||
| // the string is encoded using HTMLEscape, | ||||
| // the string is encoded using [HTMLEscape], | ||||
| // which replaces "<", ">", "&", U+2028, and U+2029 are escaped | ||||
| // to "\u003c","\u003e", "\u0026", "\u2028", and "\u2029". | ||||
| // This replacement can be disabled when using an Encoder, | ||||
| // by calling SetEscapeHTML(false). | ||||
| // This replacement can be disabled when using an [Encoder], | ||||
| // by calling [Encoder.SetEscapeHTML](false). | ||||
| // | ||||
| // Array and slice values encode as JSON arrays, except that | ||||
| // []byte encodes as a base64-encoded string, and a nil slice | ||||
| @@ -107,7 +110,7 @@ import ( | ||||
| // only Unicode letters, digits, and ASCII punctuation except quotation | ||||
| // marks, backslash, and comma. | ||||
| // | ||||
| // Anonymous struct fields are usually marshaled as if their inner exported fields | ||||
| // Embedded struct fields are usually marshaled as if their inner exported fields | ||||
| // were fields in the outer struct, subject to the usual Go visibility rules amended | ||||
| // as described in the next paragraph. | ||||
| // An anonymous struct field with a name given in its JSON tag is treated as | ||||
| @@ -134,11 +137,11 @@ import ( | ||||
| // a JSON tag of "-". | ||||
| // | ||||
| // Map values encode as JSON objects. The map's key type must either be a | ||||
| // string, an integer type, or implement encoding.TextMarshaler. The map keys | ||||
| // string, an integer type, or implement [encoding.TextMarshaler]. The map keys | ||||
| // are sorted and used as JSON object keys by applying the following rules, | ||||
| // subject to the UTF-8 coercion described for string values above: | ||||
| //   - keys of any string type are used directly | ||||
| //   - encoding.TextMarshalers are marshaled | ||||
| //   - keys that implement [encoding.TextMarshaler] are marshaled | ||||
| //   - integer keys are converted to strings | ||||
| // | ||||
| // Pointer values encode as the value pointed to. | ||||
| @@ -149,13 +152,14 @@ import ( | ||||
| // | ||||
| // Channel, complex, and function values cannot be encoded in JSON. | ||||
| // Attempting to encode such a value causes Marshal to return | ||||
| // an UnsupportedTypeError. | ||||
| // an [UnsupportedTypeError]. | ||||
| // | ||||
| // JSON cannot represent cyclic data structures and Marshal does not | ||||
| // handle them. Passing cyclic structures to Marshal will result in | ||||
| // an error. | ||||
| func Marshal(v any) ([]byte, error) { | ||||
| 	e := newEncodeState() | ||||
| 	defer encodeStatePool.Put(e) | ||||
|  | ||||
| 	err := e.marshal(v, encOpts{escapeHTML: true}) | ||||
| 	if err != nil { | ||||
| @@ -163,8 +167,6 @@ func Marshal(v any) ([]byte, error) { | ||||
| 	} | ||||
| 	buf := append([]byte(nil), e.Bytes()...) | ||||
|  | ||||
| 	encodeStatePool.Put(e) | ||||
|  | ||||
| 	return buf, nil | ||||
| } | ||||
|  | ||||
| @@ -194,7 +196,7 @@ func MarshalSafeCollections(v interface{}, nilSafeSlices bool, nilSafeMaps bool, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // MarshalIndent is like Marshal but applies Indent to format the output. | ||||
| // MarshalIndent is like [Marshal] but applies [Indent] to format the output. | ||||
| // Each JSON element in the output will begin on a new line beginning with prefix | ||||
| // followed by one or more copies of indent according to the indentation nesting. | ||||
| func MarshalIndent(v any, prefix, indent string) ([]byte, error) { | ||||
| @@ -202,47 +204,12 @@ func MarshalIndent(v any, prefix, indent string) ([]byte, error) { | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 	var buf bytes.Buffer | ||||
| 	err = Indent(&buf, b, prefix, indent) | ||||
| 	b2 := make([]byte, 0, indentGrowthFactor*len(b)) | ||||
| 	b2, err = appendIndent(b2, b, prefix, indent) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 	return buf.Bytes(), nil | ||||
| } | ||||
|  | ||||
| // HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029 | ||||
| // characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029 | ||||
| // so that the JSON will be safe to embed inside HTML <script> tags. | ||||
| // For historical reasons, web browsers don't honor standard HTML | ||||
| // escaping within <script> tags, so an alternative JSON encoding must | ||||
| // be used. | ||||
| func HTMLEscape(dst *bytes.Buffer, src []byte) { | ||||
| 	// The characters can only appear in string literals, | ||||
| 	// so just scan the string one byte at a time. | ||||
| 	start := 0 | ||||
| 	for i, c := range src { | ||||
| 		if c == '<' || c == '>' || c == '&' { | ||||
| 			if start < i { | ||||
| 				dst.Write(src[start:i]) | ||||
| 			} | ||||
| 			dst.WriteString(`\u00`) | ||||
| 			dst.WriteByte(hex[c>>4]) | ||||
| 			dst.WriteByte(hex[c&0xF]) | ||||
| 			start = i + 1 | ||||
| 		} | ||||
| 		// Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9). | ||||
| 		if c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 { | ||||
| 			if start < i { | ||||
| 				dst.Write(src[start:i]) | ||||
| 			} | ||||
| 			dst.WriteString(`\u202`) | ||||
| 			dst.WriteByte(hex[src[i+2]&0xF]) | ||||
| 			start = i + 3 | ||||
| 		} | ||||
| 	} | ||||
| 	if start < len(src) { | ||||
| 		dst.Write(src[start:]) | ||||
| 	} | ||||
| 	return b2, nil | ||||
| } | ||||
|  | ||||
| // Marshaler is the interface implemented by types that | ||||
| @@ -251,7 +218,7 @@ type Marshaler interface { | ||||
| 	MarshalJSON() ([]byte, error) | ||||
| } | ||||
|  | ||||
| // An UnsupportedTypeError is returned by Marshal when attempting | ||||
| // An UnsupportedTypeError is returned by [Marshal] when attempting | ||||
| // to encode an unsupported value type. | ||||
| type UnsupportedTypeError struct { | ||||
| 	Type reflect.Type | ||||
| @@ -261,7 +228,7 @@ func (e *UnsupportedTypeError) Error() string { | ||||
| 	return "json: unsupported type: " + e.Type.String() | ||||
| } | ||||
|  | ||||
| // An UnsupportedValueError is returned by Marshal when attempting | ||||
| // An UnsupportedValueError is returned by [Marshal] when attempting | ||||
| // to encode an unsupported value. | ||||
| type UnsupportedValueError struct { | ||||
| 	Value reflect.Value | ||||
| @@ -272,9 +239,9 @@ func (e *UnsupportedValueError) Error() string { | ||||
| 	return "json: unsupported value: " + e.Str | ||||
| } | ||||
|  | ||||
| // Before Go 1.2, an InvalidUTF8Error was returned by Marshal when | ||||
| // Before Go 1.2, an InvalidUTF8Error was returned by [Marshal] when | ||||
| // attempting to encode a string value with invalid UTF-8 sequences. | ||||
| // As of Go 1.2, Marshal instead coerces the string to valid UTF-8 by | ||||
| // As of Go 1.2, [Marshal] instead coerces the string to valid UTF-8 by | ||||
| // replacing invalid bytes with the Unicode replacement rune U+FFFD. | ||||
| // | ||||
| // Deprecated: No longer used; kept for compatibility. | ||||
| @@ -286,7 +253,8 @@ func (e *InvalidUTF8Error) Error() string { | ||||
| 	return "json: invalid UTF-8 in string: " + strconv.Quote(e.S) | ||||
| } | ||||
|  | ||||
| // A MarshalerError represents an error from calling a MarshalJSON or MarshalText method. | ||||
| // A MarshalerError represents an error from calling a | ||||
| // [Marshaler.MarshalJSON] or [encoding.TextMarshaler.MarshalText] method. | ||||
| type MarshalerError struct { | ||||
| 	Type       reflect.Type | ||||
| 	Err        error | ||||
| @@ -306,12 +274,11 @@ func (e *MarshalerError) Error() string { | ||||
| // Unwrap returns the underlying error. | ||||
| func (e *MarshalerError) Unwrap() error { return e.Err } | ||||
|  | ||||
| var hex = "0123456789abcdef" | ||||
| const hex = "0123456789abcdef" | ||||
|  | ||||
| // An encodeState encodes JSON into a bytes.Buffer. | ||||
| type encodeState struct { | ||||
| 	bytes.Buffer // accumulated output | ||||
| 	scratch      [64]byte | ||||
|  | ||||
| 	// Keep track of what pointers we've seen in the current recursive call | ||||
| 	// path, to avoid cycles that could lead to a stack overflow. Only do | ||||
| @@ -367,16 +334,12 @@ func isEmptyValue(v reflect.Value) bool { | ||||
| 	switch v.Kind() { | ||||
| 	case reflect.Array, reflect.Map, reflect.Slice, reflect.String: | ||||
| 		return v.Len() == 0 | ||||
| 	case reflect.Bool: | ||||
| 		return !v.Bool() | ||||
| 	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: | ||||
| 		return v.Int() == 0 | ||||
| 	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: | ||||
| 		return v.Uint() == 0 | ||||
| 	case reflect.Float32, reflect.Float64: | ||||
| 		return v.Float() == 0 | ||||
| 	case reflect.Interface, reflect.Pointer: | ||||
| 		return v.IsNil() | ||||
| 	case reflect.Bool, | ||||
| 		reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, | ||||
| 		reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, | ||||
| 		reflect.Float32, reflect.Float64, | ||||
| 		reflect.Interface, reflect.Pointer: | ||||
| 		return v.IsZero() | ||||
| 	} | ||||
| 	return false | ||||
| } | ||||
| @@ -386,7 +349,6 @@ func (e *encodeState) reflectValue(v reflect.Value, opts encOpts) { | ||||
| 	if opts.tagkey != nil { | ||||
| 		tagkey = *opts.tagkey | ||||
| 	} | ||||
|  | ||||
| 	valueEncoder(v, tagkey)(e, v, opts) | ||||
| } | ||||
|  | ||||
| @@ -418,7 +380,7 @@ func valueEncoder(v reflect.Value, tagkey string) encoderFunc { | ||||
| } | ||||
|  | ||||
| func typeEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||
| 	if fi, ok := encoderCache.Load(t); ok { | ||||
| 	if fi, ok := encoderCache.Load(TagKeyTypeKey{t, tagkey}); ok { | ||||
| 		return fi.(encoderFunc) | ||||
| 	} | ||||
|  | ||||
| @@ -431,7 +393,7 @@ func typeEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||
| 		f  encoderFunc | ||||
| 	) | ||||
| 	wg.Add(1) | ||||
| 	fi, loaded := encoderCache.LoadOrStore(t, encoderFunc(func(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 	fi, loaded := encoderCache.LoadOrStore(TagKeyTypeKey{t, tagkey}, encoderFunc(func(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 		wg.Wait() | ||||
| 		f(e, v, opts) | ||||
| 	})) | ||||
| @@ -442,13 +404,13 @@ func typeEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||
| 	// Compute the real encoder and replace the indirect func with it. | ||||
| 	f = newTypeEncoder(t, true, tagkey) | ||||
| 	wg.Done() | ||||
| 	encoderCache.Store(t, f) | ||||
| 	encoderCache.Store(TagKeyTypeKey{t, tagkey}, f) | ||||
| 	return f | ||||
| } | ||||
|  | ||||
| var ( | ||||
| 	marshalerType     = reflect.TypeOf((*Marshaler)(nil)).Elem() | ||||
| 	textMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem() | ||||
| 	marshalerType     = reflect.TypeFor[Marshaler]() | ||||
| 	textMarshalerType = reflect.TypeFor[encoding.TextMarshaler]() | ||||
| ) | ||||
|  | ||||
| // newTypeEncoder constructs an encoderFunc for a type. | ||||
| @@ -517,8 +479,10 @@ func marshalerEncoder(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 	} | ||||
| 	b, err := m.MarshalJSON() | ||||
| 	if err == nil { | ||||
| 		// copy JSON into buffer, checking validity. | ||||
| 		err = compact(&e.Buffer, b, opts.escapeHTML) | ||||
| 		e.Grow(len(b)) | ||||
| 		out := e.AvailableBuffer() | ||||
| 		out, err = appendCompact(out, b, opts.escapeHTML) | ||||
| 		e.Buffer.Write(out) | ||||
| 	} | ||||
| 	if err != nil { | ||||
| 		e.error(&MarshalerError{v.Type(), err, "MarshalJSON"}) | ||||
| @@ -534,8 +498,10 @@ func addrMarshalerEncoder(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 	m := va.Interface().(Marshaler) | ||||
| 	b, err := m.MarshalJSON() | ||||
| 	if err == nil { | ||||
| 		// copy JSON into buffer, checking validity. | ||||
| 		err = compact(&e.Buffer, b, opts.escapeHTML) | ||||
| 		e.Grow(len(b)) | ||||
| 		out := e.AvailableBuffer() | ||||
| 		out, err = appendCompact(out, b, opts.escapeHTML) | ||||
| 		e.Buffer.Write(out) | ||||
| 	} | ||||
| 	if err != nil { | ||||
| 		e.error(&MarshalerError{v.Type(), err, "MarshalJSON"}) | ||||
| @@ -556,7 +522,7 @@ func textMarshalerEncoder(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 	if err != nil { | ||||
| 		e.error(&MarshalerError{v.Type(), err, "MarshalText"}) | ||||
| 	} | ||||
| 	e.stringBytes(b, opts.escapeHTML) | ||||
| 	e.Write(appendString(e.AvailableBuffer(), b, opts.escapeHTML)) | ||||
| } | ||||
|  | ||||
| func addrTextMarshalerEncoder(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| @@ -570,43 +536,31 @@ func addrTextMarshalerEncoder(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 	if err != nil { | ||||
| 		e.error(&MarshalerError{v.Type(), err, "MarshalText"}) | ||||
| 	} | ||||
| 	e.stringBytes(b, opts.escapeHTML) | ||||
| 	e.Write(appendString(e.AvailableBuffer(), b, opts.escapeHTML)) | ||||
| } | ||||
|  | ||||
| func boolEncoder(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 	if opts.quoted { | ||||
| 		e.WriteByte('"') | ||||
| 	} | ||||
| 	if v.Bool() { | ||||
| 		e.WriteString("true") | ||||
| 	} else { | ||||
| 		e.WriteString("false") | ||||
| 	} | ||||
| 	if opts.quoted { | ||||
| 		e.WriteByte('"') | ||||
| 	} | ||||
| 	b := e.AvailableBuffer() | ||||
| 	b = mayAppendQuote(b, opts.quoted) | ||||
| 	b = strconv.AppendBool(b, v.Bool()) | ||||
| 	b = mayAppendQuote(b, opts.quoted) | ||||
| 	e.Write(b) | ||||
| } | ||||
|  | ||||
| func intEncoder(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 	b := strconv.AppendInt(e.scratch[:0], v.Int(), 10) | ||||
| 	if opts.quoted { | ||||
| 		e.WriteByte('"') | ||||
| 	} | ||||
| 	b := e.AvailableBuffer() | ||||
| 	b = mayAppendQuote(b, opts.quoted) | ||||
| 	b = strconv.AppendInt(b, v.Int(), 10) | ||||
| 	b = mayAppendQuote(b, opts.quoted) | ||||
| 	e.Write(b) | ||||
| 	if opts.quoted { | ||||
| 		e.WriteByte('"') | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func uintEncoder(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 	b := strconv.AppendUint(e.scratch[:0], v.Uint(), 10) | ||||
| 	if opts.quoted { | ||||
| 		e.WriteByte('"') | ||||
| 	} | ||||
| 	b := e.AvailableBuffer() | ||||
| 	b = mayAppendQuote(b, opts.quoted) | ||||
| 	b = strconv.AppendUint(b, v.Uint(), 10) | ||||
| 	b = mayAppendQuote(b, opts.quoted) | ||||
| 	e.Write(b) | ||||
| 	if opts.quoted { | ||||
| 		e.WriteByte('"') | ||||
| 	} | ||||
| } | ||||
|  | ||||
| type floatEncoder int // number of bits | ||||
| @@ -622,7 +576,8 @@ func (bits floatEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 	// See golang.org/issue/6384 and golang.org/issue/14135. | ||||
| 	// Like fmt %g, but the exponent cutoffs are different | ||||
| 	// and exponents themselves are not padded to two digits. | ||||
| 	b := e.scratch[:0] | ||||
| 	b := e.AvailableBuffer() | ||||
| 	b = mayAppendQuote(b, opts.quoted) | ||||
| 	abs := math.Abs(f) | ||||
| 	fmt := byte('f') | ||||
| 	// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right. | ||||
| @@ -640,14 +595,8 @@ func (bits floatEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 			b = b[:n-1] | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if opts.quoted { | ||||
| 		e.WriteByte('"') | ||||
| 	} | ||||
| 	b = mayAppendQuote(b, opts.quoted) | ||||
| 	e.Write(b) | ||||
| 	if opts.quoted { | ||||
| 		e.WriteByte('"') | ||||
| 	} | ||||
| } | ||||
|  | ||||
| var ( | ||||
| @@ -666,28 +615,32 @@ func stringEncoder(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 		if !isValidNumber(numStr) { | ||||
| 			e.error(fmt.Errorf("json: invalid number literal %q", numStr)) | ||||
| 		} | ||||
| 		if opts.quoted { | ||||
| 			e.WriteByte('"') | ||||
| 		} | ||||
| 		e.WriteString(numStr) | ||||
| 		if opts.quoted { | ||||
| 			e.WriteByte('"') | ||||
| 		} | ||||
| 		b := e.AvailableBuffer() | ||||
| 		b = mayAppendQuote(b, opts.quoted) | ||||
| 		b = append(b, numStr...) | ||||
| 		b = mayAppendQuote(b, opts.quoted) | ||||
| 		e.Write(b) | ||||
| 		return | ||||
| 	} | ||||
| 	if opts.quoted { | ||||
| 		e2 := newEncodeState() | ||||
| 		// Since we encode the string twice, we only need to escape HTML | ||||
| 		// the first time. | ||||
| 		e2.string(v.String(), opts.escapeHTML) | ||||
| 		e.stringBytes(e2.Bytes(), false) | ||||
| 		encodeStatePool.Put(e2) | ||||
| 		b := appendString(nil, v.String(), opts.escapeHTML) | ||||
| 		e.Write(appendString(e.AvailableBuffer(), b, false)) // no need to escape again since it is already escaped | ||||
| 	} else { | ||||
| 		e.string(v.String(), opts.escapeHTML) | ||||
| 		e.Write(appendString(e.AvailableBuffer(), v.String(), opts.escapeHTML)) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // isValidNumber reports whether s is a valid JSON number literal. | ||||
| // | ||||
| // isValidNumber should be an internal detail, | ||||
| // but widely used packages access it using linkname. | ||||
| // Notable members of the hall of shame include: | ||||
| //   - github.com/bytedance/sonic | ||||
| // | ||||
| // Do not remove or change the type signature. | ||||
| // See go.dev/issue/67401. | ||||
| // | ||||
| //go:linkname isValidNumber | ||||
| func isValidNumber(s string) bool { | ||||
| 	// This function implements the JSON numbers grammar. | ||||
| 	// See https://tools.ietf.org/html/rfc7159#section-6 | ||||
| @@ -764,8 +717,9 @@ type structEncoder struct { | ||||
| } | ||||
|  | ||||
| type structFields struct { | ||||
| 	list      []field | ||||
| 	nameIndex map[string]int | ||||
| 	list         []field | ||||
| 	byExactName  map[string]*field | ||||
| 	byFoldedName map[string]*field | ||||
| } | ||||
|  | ||||
| func (se structEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| @@ -788,7 +742,7 @@ FieldLoop: | ||||
|  | ||||
| 		if f.omitEmpty && isEmptyValue(fv) { | ||||
| 			continue | ||||
| 		} else if opts.filter != nil && len(f.jsonfilter) > 0 && !f.jsonfilter.Contains(*opts.filter) { | ||||
| 		} else if !matchesJSONFilter(f.jsonfilter, opts.filter) { | ||||
| 			continue | ||||
| 		} | ||||
| 		e.WriteByte(next) | ||||
| @@ -808,6 +762,25 @@ FieldLoop: | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func matchesJSONFilter(filter jsonfilter, value *string) bool { | ||||
| 	if len(filter) == 0 { | ||||
| 		return true // no filter in struct | ||||
| 	} | ||||
| 	if value == nil || *value == "" { | ||||
| 		return false // no filter set, but struct has filter, return false | ||||
| 	} | ||||
| 	if len(filter) == 1 && filter[0] == "-" { | ||||
| 		return false | ||||
| 	} | ||||
| 	if filter.Contains(*value) { | ||||
| 		return true | ||||
| 	} | ||||
| 	if filter.Contains("*") { | ||||
| 		return true | ||||
| 	} | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| func newStructEncoder(t reflect.Type, tagkey string) encoderFunc { | ||||
| 	se := structEncoder{fields: cachedTypeFields(t, tagkey)} | ||||
| 	return se.encode | ||||
| @@ -839,22 +812,26 @@ func (me mapEncoder) encode(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 	e.WriteByte('{') | ||||
|  | ||||
| 	// Extract and sort the keys. | ||||
| 	sv := make([]reflectWithString, v.Len()) | ||||
| 	mi := v.MapRange() | ||||
| 	var ( | ||||
| 		sv  = make([]reflectWithString, v.Len()) | ||||
| 		mi  = v.MapRange() | ||||
| 		err error | ||||
| 	) | ||||
| 	for i := 0; mi.Next(); i++ { | ||||
| 		sv[i].k = mi.Key() | ||||
| 		sv[i].v = mi.Value() | ||||
| 		if err := sv[i].resolve(); err != nil { | ||||
| 		if sv[i].ks, err = resolveKeyName(mi.Key()); err != nil { | ||||
| 			e.error(fmt.Errorf("json: encoding error for type %q: %q", v.Type().String(), err.Error())) | ||||
| 		} | ||||
| 		sv[i].v = mi.Value() | ||||
| 	} | ||||
| 	sort.Slice(sv, func(i, j int) bool { return sv[i].ks < sv[j].ks }) | ||||
| 	slices.SortFunc(sv, func(i, j reflectWithString) int { | ||||
| 		return strings.Compare(i.ks, j.ks) | ||||
| 	}) | ||||
|  | ||||
| 	for i, kv := range sv { | ||||
| 		if i > 0 { | ||||
| 			e.WriteByte(',') | ||||
| 		} | ||||
| 		e.string(kv.ks, opts.escapeHTML) | ||||
| 		e.Write(appendString(e.AvailableBuffer(), kv.ks, opts.escapeHTML)) | ||||
| 		e.WriteByte(':') | ||||
| 		me.elemEnc(e, kv.v, opts) | ||||
| 	} | ||||
| @@ -885,29 +862,13 @@ func encodeByteSlice(e *encodeState, v reflect.Value, opts encOpts) { | ||||
| 		} | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	s := v.Bytes() | ||||
| 	e.WriteByte('"') | ||||
| 	encodedLen := base64.StdEncoding.EncodedLen(len(s)) | ||||
| 	if encodedLen <= len(e.scratch) { | ||||
| 		// If the encoded bytes fit in e.scratch, avoid an extra | ||||
| 		// allocation and use the cheaper Encoding.Encode. | ||||
| 		dst := e.scratch[:encodedLen] | ||||
| 		base64.StdEncoding.Encode(dst, s) | ||||
| 		e.Write(dst) | ||||
| 	} else if encodedLen <= 1024 { | ||||
| 		// The encoded bytes are short enough to allocate for, and | ||||
| 		// Encoding.Encode is still cheaper. | ||||
| 		dst := make([]byte, encodedLen) | ||||
| 		base64.StdEncoding.Encode(dst, s) | ||||
| 		e.Write(dst) | ||||
| 	} else { | ||||
| 		// The encoded bytes are too long to cheaply allocate, and | ||||
| 		// Encoding.Encode is no longer noticeably cheaper. | ||||
| 		enc := base64.NewEncoder(base64.StdEncoding, e) | ||||
| 		enc.Write(s) | ||||
| 		enc.Close() | ||||
| 	} | ||||
| 	e.WriteByte('"') | ||||
| 	b := e.AvailableBuffer() | ||||
| 	b = append(b, '"') | ||||
| 	b = base64.StdEncoding.AppendEncode(b, s) | ||||
| 	b = append(b, '"') | ||||
| 	e.Write(b) | ||||
| } | ||||
|  | ||||
| // sliceEncoder just wraps an arrayEncoder, checking to make sure the value isn't nil. | ||||
| @@ -1051,78 +1012,77 @@ func typeByIndex(t reflect.Type, index []int) reflect.Type { | ||||
| } | ||||
|  | ||||
| type reflectWithString struct { | ||||
| 	k  reflect.Value | ||||
| 	v  reflect.Value | ||||
| 	ks string | ||||
| } | ||||
|  | ||||
| func (w *reflectWithString) resolve() error { | ||||
| 	if w.k.Kind() == reflect.String { | ||||
| 		w.ks = w.k.String() | ||||
| 		return nil | ||||
| func resolveKeyName(k reflect.Value) (string, error) { | ||||
| 	if k.Kind() == reflect.String { | ||||
| 		return k.String(), nil | ||||
| 	} | ||||
| 	if tm, ok := w.k.Interface().(encoding.TextMarshaler); ok { | ||||
| 		if w.k.Kind() == reflect.Pointer && w.k.IsNil() { | ||||
| 			return nil | ||||
| 	if tm, ok := k.Interface().(encoding.TextMarshaler); ok { | ||||
| 		if k.Kind() == reflect.Pointer && k.IsNil() { | ||||
| 			return "", nil | ||||
| 		} | ||||
| 		buf, err := tm.MarshalText() | ||||
| 		w.ks = string(buf) | ||||
| 		return err | ||||
| 		return string(buf), err | ||||
| 	} | ||||
| 	switch w.k.Kind() { | ||||
| 	switch k.Kind() { | ||||
| 	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: | ||||
| 		w.ks = strconv.FormatInt(w.k.Int(), 10) | ||||
| 		return nil | ||||
| 		return strconv.FormatInt(k.Int(), 10), nil | ||||
| 	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: | ||||
| 		w.ks = strconv.FormatUint(w.k.Uint(), 10) | ||||
| 		return nil | ||||
| 		return strconv.FormatUint(k.Uint(), 10), nil | ||||
| 	} | ||||
| 	panic("unexpected map key type") | ||||
| } | ||||
|  | ||||
| // NOTE: keep in sync with stringBytes below. | ||||
| func (e *encodeState) string(s string, escapeHTML bool) { | ||||
| 	e.WriteByte('"') | ||||
| func appendString[Bytes []byte | string](dst []byte, src Bytes, escapeHTML bool) []byte { | ||||
| 	dst = append(dst, '"') | ||||
| 	start := 0 | ||||
| 	for i := 0; i < len(s); { | ||||
| 		if b := s[i]; b < utf8.RuneSelf { | ||||
| 	for i := 0; i < len(src); { | ||||
| 		if b := src[i]; b < utf8.RuneSelf { | ||||
| 			if htmlSafeSet[b] || (!escapeHTML && safeSet[b]) { | ||||
| 				i++ | ||||
| 				continue | ||||
| 			} | ||||
| 			if start < i { | ||||
| 				e.WriteString(s[start:i]) | ||||
| 			} | ||||
| 			e.WriteByte('\\') | ||||
| 			dst = append(dst, src[start:i]...) | ||||
| 			switch b { | ||||
| 			case '\\', '"': | ||||
| 				e.WriteByte(b) | ||||
| 				dst = append(dst, '\\', b) | ||||
| 			case '\b': | ||||
| 				dst = append(dst, '\\', 'b') | ||||
| 			case '\f': | ||||
| 				dst = append(dst, '\\', 'f') | ||||
| 			case '\n': | ||||
| 				e.WriteByte('n') | ||||
| 				dst = append(dst, '\\', 'n') | ||||
| 			case '\r': | ||||
| 				e.WriteByte('r') | ||||
| 				dst = append(dst, '\\', 'r') | ||||
| 			case '\t': | ||||
| 				e.WriteByte('t') | ||||
| 				dst = append(dst, '\\', 't') | ||||
| 			default: | ||||
| 				// This encodes bytes < 0x20 except for \t, \n and \r. | ||||
| 				// This encodes bytes < 0x20 except for \b, \f, \n, \r and \t. | ||||
| 				// If escapeHTML is set, it also escapes <, >, and & | ||||
| 				// because they can lead to security holes when | ||||
| 				// user-controlled strings are rendered into JSON | ||||
| 				// and served to some browsers. | ||||
| 				e.WriteString(`u00`) | ||||
| 				e.WriteByte(hex[b>>4]) | ||||
| 				e.WriteByte(hex[b&0xF]) | ||||
| 				dst = append(dst, '\\', 'u', '0', '0', hex[b>>4], hex[b&0xF]) | ||||
| 			} | ||||
| 			i++ | ||||
| 			start = i | ||||
| 			continue | ||||
| 		} | ||||
| 		c, size := utf8.DecodeRuneInString(s[i:]) | ||||
| 		// TODO(https://go.dev/issue/56948): Use generic utf8 functionality. | ||||
| 		// For now, cast only a small portion of byte slices to a string | ||||
| 		// so that it can be stack allocated. This slows down []byte slightly | ||||
| 		// due to the extra copy, but keeps string performance roughly the same. | ||||
| 		n := len(src) - i | ||||
| 		if n > utf8.UTFMax { | ||||
| 			n = utf8.UTFMax | ||||
| 		} | ||||
| 		c, size := utf8.DecodeRuneInString(string(src[i : i+n])) | ||||
| 		if c == utf8.RuneError && size == 1 { | ||||
| 			if start < i { | ||||
| 				e.WriteString(s[start:i]) | ||||
| 			} | ||||
| 			e.WriteString(`\ufffd`) | ||||
| 			dst = append(dst, src[start:i]...) | ||||
| 			dst = append(dst, `\ufffd`...) | ||||
| 			i += size | ||||
| 			start = i | ||||
| 			continue | ||||
| @@ -1133,102 +1093,25 @@ func (e *encodeState) string(s string, escapeHTML bool) { | ||||
| 		// but don't work in JSONP, which has to be evaluated as JavaScript, | ||||
| 		// and can lead to security holes there. It is valid JSON to | ||||
| 		// escape them, so we do so unconditionally. | ||||
| 		// See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion. | ||||
| 		// See https://en.wikipedia.org/wiki/JSON#Safety. | ||||
| 		if c == '\u2028' || c == '\u2029' { | ||||
| 			if start < i { | ||||
| 				e.WriteString(s[start:i]) | ||||
| 			} | ||||
| 			e.WriteString(`\u202`) | ||||
| 			e.WriteByte(hex[c&0xF]) | ||||
| 			dst = append(dst, src[start:i]...) | ||||
| 			dst = append(dst, '\\', 'u', '2', '0', '2', hex[c&0xF]) | ||||
| 			i += size | ||||
| 			start = i | ||||
| 			continue | ||||
| 		} | ||||
| 		i += size | ||||
| 	} | ||||
| 	if start < len(s) { | ||||
| 		e.WriteString(s[start:]) | ||||
| 	} | ||||
| 	e.WriteByte('"') | ||||
| } | ||||
|  | ||||
| // NOTE: keep in sync with string above. | ||||
| func (e *encodeState) stringBytes(s []byte, escapeHTML bool) { | ||||
| 	e.WriteByte('"') | ||||
| 	start := 0 | ||||
| 	for i := 0; i < len(s); { | ||||
| 		if b := s[i]; b < utf8.RuneSelf { | ||||
| 			if htmlSafeSet[b] || (!escapeHTML && safeSet[b]) { | ||||
| 				i++ | ||||
| 				continue | ||||
| 			} | ||||
| 			if start < i { | ||||
| 				e.Write(s[start:i]) | ||||
| 			} | ||||
| 			e.WriteByte('\\') | ||||
| 			switch b { | ||||
| 			case '\\', '"': | ||||
| 				e.WriteByte(b) | ||||
| 			case '\n': | ||||
| 				e.WriteByte('n') | ||||
| 			case '\r': | ||||
| 				e.WriteByte('r') | ||||
| 			case '\t': | ||||
| 				e.WriteByte('t') | ||||
| 			default: | ||||
| 				// This encodes bytes < 0x20 except for \t, \n and \r. | ||||
| 				// If escapeHTML is set, it also escapes <, >, and & | ||||
| 				// because they can lead to security holes when | ||||
| 				// user-controlled strings are rendered into JSON | ||||
| 				// and served to some browsers. | ||||
| 				e.WriteString(`u00`) | ||||
| 				e.WriteByte(hex[b>>4]) | ||||
| 				e.WriteByte(hex[b&0xF]) | ||||
| 			} | ||||
| 			i++ | ||||
| 			start = i | ||||
| 			continue | ||||
| 		} | ||||
| 		c, size := utf8.DecodeRune(s[i:]) | ||||
| 		if c == utf8.RuneError && size == 1 { | ||||
| 			if start < i { | ||||
| 				e.Write(s[start:i]) | ||||
| 			} | ||||
| 			e.WriteString(`\ufffd`) | ||||
| 			i += size | ||||
| 			start = i | ||||
| 			continue | ||||
| 		} | ||||
| 		// U+2028 is LINE SEPARATOR. | ||||
| 		// U+2029 is PARAGRAPH SEPARATOR. | ||||
| 		// They are both technically valid characters in JSON strings, | ||||
| 		// but don't work in JSONP, which has to be evaluated as JavaScript, | ||||
| 		// and can lead to security holes there. It is valid JSON to | ||||
| 		// escape them, so we do so unconditionally. | ||||
| 		// See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion. | ||||
| 		if c == '\u2028' || c == '\u2029' { | ||||
| 			if start < i { | ||||
| 				e.Write(s[start:i]) | ||||
| 			} | ||||
| 			e.WriteString(`\u202`) | ||||
| 			e.WriteByte(hex[c&0xF]) | ||||
| 			i += size | ||||
| 			start = i | ||||
| 			continue | ||||
| 		} | ||||
| 		i += size | ||||
| 	} | ||||
| 	if start < len(s) { | ||||
| 		e.Write(s[start:]) | ||||
| 	} | ||||
| 	e.WriteByte('"') | ||||
| 	dst = append(dst, src[start:]...) | ||||
| 	dst = append(dst, '"') | ||||
| 	return dst | ||||
| } | ||||
|  | ||||
| // A field represents a single field found in a struct. | ||||
| type field struct { | ||||
| 	name      string | ||||
| 	nameBytes []byte                 // []byte(name) | ||||
| 	equalFold func(s, t []byte) bool // bytes.EqualFold or equivalent | ||||
| 	nameBytes []byte // []byte(name) | ||||
|  | ||||
| 	nameNonEsc  string // `"` + name + `":` | ||||
| 	nameEscHTML string // `"` + HTMLEscape(name) + `":` | ||||
| @@ -1255,28 +1138,19 @@ func (j jsonfilter) Contains(t string) bool { | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| // byIndex sorts field by index sequence. | ||||
| type byIndex []field | ||||
|  | ||||
| func (x byIndex) Len() int { return len(x) } | ||||
|  | ||||
| func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } | ||||
|  | ||||
| func (x byIndex) Less(i, j int) bool { | ||||
| 	for k, xik := range x[i].index { | ||||
| 		if k >= len(x[j].index) { | ||||
| 			return false | ||||
| 		} | ||||
| 		if xik != x[j].index[k] { | ||||
| 			return xik < x[j].index[k] | ||||
| 		} | ||||
| 	} | ||||
| 	return len(x[i].index) < len(x[j].index) | ||||
| } | ||||
|  | ||||
| // typeFields returns a list of fields that JSON should recognize for the given type. | ||||
| // The algorithm is breadth-first search over the set of structs to include - the top struct | ||||
| // and then any reachable anonymous structs. | ||||
| // | ||||
| // typeFields should be an internal detail, | ||||
| // but widely used packages access it using linkname. | ||||
| // Notable members of the hall of shame include: | ||||
| //   - github.com/bytedance/sonic | ||||
| // | ||||
| // Do not remove or change the type signature. | ||||
| // See go.dev/issue/67401. | ||||
| // | ||||
| //go:linkname typeFields | ||||
| func typeFields(t reflect.Type, tagkey string) structFields { | ||||
| 	// Anonymous fields to explore at the current level and the next. | ||||
| 	current := []field{} | ||||
| @@ -1291,8 +1165,8 @@ func typeFields(t reflect.Type, tagkey string) structFields { | ||||
| 	// Fields found. | ||||
| 	var fields []field | ||||
|  | ||||
| 	// Buffer to run HTMLEscape on field names. | ||||
| 	var nameEscBuf bytes.Buffer | ||||
| 	// Buffer to run appendHTMLEscape on field names. | ||||
| 	var nameEscBuf []byte | ||||
|  | ||||
| 	for len(next) > 0 { | ||||
| 		current, next = next, current[:0] | ||||
| @@ -1331,10 +1205,10 @@ func typeFields(t reflect.Type, tagkey string) structFields { | ||||
| 					name = "" | ||||
| 				} | ||||
|  | ||||
| 				var jsonfilter []string | ||||
| 				var jsonfilterVal []string | ||||
| 				jsonfilterTag := sf.Tag.Get("jsonfilter") | ||||
| 				if jsonfilterTag != "" && jsonfilterTag != "-" { | ||||
| 					jsonfilter = strings.Split(jsonfilterTag, ",") | ||||
| 				if jsonfilterTag != "" { | ||||
| 					jsonfilterVal = strings.Split(jsonfilterTag, ",") | ||||
| 				} | ||||
|  | ||||
| 				index := make([]int, len(f.index)+1) | ||||
| @@ -1372,25 +1246,21 @@ func typeFields(t reflect.Type, tagkey string) structFields { | ||||
| 						index:      index, | ||||
| 						typ:        ft, | ||||
| 						omitEmpty:  opts.Contains("omitempty"), | ||||
| 						jsonfilter: jsonfilter, | ||||
| 						jsonfilter: jsonfilterVal, | ||||
| 						quoted:     quoted, | ||||
| 					} | ||||
| 					field.nameBytes = []byte(field.name) | ||||
| 					field.equalFold = foldFunc(field.nameBytes) | ||||
|  | ||||
| 					// Build nameEscHTML and nameNonEsc ahead of time. | ||||
| 					nameEscBuf.Reset() | ||||
| 					nameEscBuf.WriteString(`"`) | ||||
| 					HTMLEscape(&nameEscBuf, field.nameBytes) | ||||
| 					nameEscBuf.WriteString(`":`) | ||||
| 					field.nameEscHTML = nameEscBuf.String() | ||||
| 					nameEscBuf = appendHTMLEscape(nameEscBuf[:0], field.nameBytes) | ||||
| 					field.nameEscHTML = `"` + string(nameEscBuf) + `":` | ||||
| 					field.nameNonEsc = `"` + field.name + `":` | ||||
|  | ||||
| 					fields = append(fields, field) | ||||
| 					if count[f.typ] > 1 { | ||||
| 						// If there were multiple instances, add a second, | ||||
| 						// so that the annihilation code will see a duplicate. | ||||
| 						// It only cares about the distinction between 1 or 2, | ||||
| 						// It only cares about the distinction between 1 and 2, | ||||
| 						// so don't bother generating any more copies. | ||||
| 						fields = append(fields, fields[len(fields)-1]) | ||||
| 					} | ||||
| @@ -1406,21 +1276,23 @@ func typeFields(t reflect.Type, tagkey string) structFields { | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	sort.Slice(fields, func(i, j int) bool { | ||||
| 		x := fields | ||||
| 	slices.SortFunc(fields, func(a, b field) int { | ||||
| 		// sort field by name, breaking ties with depth, then | ||||
| 		// breaking ties with "name came from json tag", then | ||||
| 		// breaking ties with index sequence. | ||||
| 		if x[i].name != x[j].name { | ||||
| 			return x[i].name < x[j].name | ||||
| 		if c := strings.Compare(a.name, b.name); c != 0 { | ||||
| 			return c | ||||
| 		} | ||||
| 		if len(x[i].index) != len(x[j].index) { | ||||
| 			return len(x[i].index) < len(x[j].index) | ||||
| 		if c := cmp.Compare(len(a.index), len(b.index)); c != 0 { | ||||
| 			return c | ||||
| 		} | ||||
| 		if x[i].tag != x[j].tag { | ||||
| 			return x[i].tag | ||||
| 		if a.tag != b.tag { | ||||
| 			if a.tag { | ||||
| 				return -1 | ||||
| 			} | ||||
| 			return +1 | ||||
| 		} | ||||
| 		return byIndex(x).Less(i, j) | ||||
| 		return slices.Compare(a.index, b.index) | ||||
| 	}) | ||||
|  | ||||
| 	// Delete all fields that are hidden by the Go rules for embedded fields, | ||||
| @@ -1452,17 +1324,24 @@ func typeFields(t reflect.Type, tagkey string) structFields { | ||||
| 	} | ||||
|  | ||||
| 	fields = out | ||||
| 	sort.Sort(byIndex(fields)) | ||||
| 	slices.SortFunc(fields, func(i, j field) int { | ||||
| 		return slices.Compare(i.index, j.index) | ||||
| 	}) | ||||
|  | ||||
| 	for i := range fields { | ||||
| 		f := &fields[i] | ||||
| 		f.encoder = typeEncoder(typeByIndex(t, f.index), tagkey) | ||||
| 	} | ||||
| 	nameIndex := make(map[string]int, len(fields)) | ||||
| 	exactNameIndex := make(map[string]*field, len(fields)) | ||||
| 	foldedNameIndex := make(map[string]*field, len(fields)) | ||||
| 	for i, field := range fields { | ||||
| 		nameIndex[field.name] = i | ||||
| 		exactNameIndex[field.name] = &fields[i] | ||||
| 		// For historical reasons, first folded match takes precedence. | ||||
| 		if _, ok := foldedNameIndex[string(foldName(field.nameBytes))]; !ok { | ||||
| 			foldedNameIndex[string(foldName(field.nameBytes))] = &fields[i] | ||||
| 		} | ||||
| 	} | ||||
| 	return structFields{fields, nameIndex} | ||||
| 	return structFields{fields, exactNameIndex, foldedNameIndex} | ||||
| } | ||||
|  | ||||
| // dominantField looks through the fields, all of which are known to | ||||
| @@ -1481,26 +1360,25 @@ func dominantField(fields []field) (field, bool) { | ||||
| 	return fields[0], true | ||||
| } | ||||
|  | ||||
| var fieldCache sync.Map // map[string]map[reflect.Type]structFields | ||||
| var fieldCache sync.Map // map[reflect.Type + tagkey]structFields | ||||
|  | ||||
| // cachedTypeFields is like typeFields but uses a cache to avoid repeated work. | ||||
| func cachedTypeFields(t reflect.Type, tagkey string) structFields { | ||||
| 	if m0, ok := fieldCache.Load(tagkey); ok { | ||||
|  | ||||
| 		if f, ok := m0.(*sync.Map).Load(t); ok { | ||||
| 			return f.(structFields) | ||||
| 		} | ||||
| 		f, _ := m0.(*sync.Map).LoadOrStore(t, typeFields(t, tagkey)) | ||||
| 		return f.(structFields) | ||||
|  | ||||
| 	} else { | ||||
|  | ||||
| 		m0 := &sync.Map{} | ||||
| 		f, _ := m0.LoadOrStore(t, typeFields(t, tagkey)) | ||||
|  | ||||
| 		fieldCache.Store(tagkey, m0) | ||||
|  | ||||
| 	if f, ok := fieldCache.Load(TagKeyTypeKey{t, tagkey}); ok { | ||||
| 		return f.(structFields) | ||||
| 	} | ||||
|  | ||||
| 	f, _ := fieldCache.LoadOrStore(TagKeyTypeKey{t, tagkey}, typeFields(t, tagkey)) | ||||
| 	return f.(structFields) | ||||
| } | ||||
|  | ||||
| func mayAppendQuote(b []byte, quoted bool) []byte { | ||||
| 	if quoted { | ||||
| 		b = append(b, '"') | ||||
| 	} | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| type TagKeyTypeKey struct { | ||||
| 	Type   reflect.Type | ||||
| 	TagKey string | ||||
| } | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										150
									
								
								gojson/fold.go
									
									
									
									
									
								
							
							
						
						
									
										150
									
								
								gojson/fold.go
									
									
									
									
									
								
							| @@ -5,140 +5,44 @@ | ||||
| package json | ||||
|  | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"unicode" | ||||
| 	"unicode/utf8" | ||||
| ) | ||||
|  | ||||
| const ( | ||||
| 	caseMask     = ^byte(0x20) // Mask to ignore case in ASCII. | ||||
| 	kelvin       = '\u212a' | ||||
| 	smallLongEss = '\u017f' | ||||
| ) | ||||
|  | ||||
| // foldFunc returns one of four different case folding equivalence | ||||
| // functions, from most general (and slow) to fastest: | ||||
| // | ||||
| // 1) bytes.EqualFold, if the key s contains any non-ASCII UTF-8 | ||||
| // 2) equalFoldRight, if s contains special folding ASCII ('k', 'K', 's', 'S') | ||||
| // 3) asciiEqualFold, no special, but includes non-letters (including _) | ||||
| // 4) simpleLetterEqualFold, no specials, no non-letters. | ||||
| // | ||||
| // The letters S and K are special because they map to 3 runes, not just 2: | ||||
| //   - S maps to s and to U+017F 'ſ' Latin small letter long s | ||||
| //   - k maps to K and to U+212A 'K' Kelvin sign | ||||
| // | ||||
| // See https://play.golang.org/p/tTxjOc0OGo | ||||
| // | ||||
| // The returned function is specialized for matching against s and | ||||
| // should only be given s. It's not curried for performance reasons. | ||||
| func foldFunc(s []byte) func(s, t []byte) bool { | ||||
| 	nonLetter := false | ||||
| 	special := false // special letter | ||||
| 	for _, b := range s { | ||||
| 		if b >= utf8.RuneSelf { | ||||
| 			return bytes.EqualFold | ||||
| 		} | ||||
| 		upper := b & caseMask | ||||
| 		if upper < 'A' || upper > 'Z' { | ||||
| 			nonLetter = true | ||||
| 		} else if upper == 'K' || upper == 'S' { | ||||
| 			// See above for why these letters are special. | ||||
| 			special = true | ||||
| 		} | ||||
| 	} | ||||
| 	if special { | ||||
| 		return equalFoldRight | ||||
| 	} | ||||
| 	if nonLetter { | ||||
| 		return asciiEqualFold | ||||
| 	} | ||||
| 	return simpleLetterEqualFold | ||||
| // foldName returns a folded string such that foldName(x) == foldName(y) | ||||
| // is identical to bytes.EqualFold(x, y). | ||||
| func foldName(in []byte) []byte { | ||||
| 	// This is inlinable to take advantage of "function outlining". | ||||
| 	var arr [32]byte // large enough for most JSON names | ||||
| 	return appendFoldedName(arr[:0], in) | ||||
| } | ||||
|  | ||||
| // equalFoldRight is a specialization of bytes.EqualFold when s is | ||||
| // known to be all ASCII (including punctuation), but contains an 's', | ||||
| // 'S', 'k', or 'K', requiring a Unicode fold on the bytes in t. | ||||
| // See comments on foldFunc. | ||||
| func equalFoldRight(s, t []byte) bool { | ||||
| 	for _, sb := range s { | ||||
| 		if len(t) == 0 { | ||||
| 			return false | ||||
| 		} | ||||
| 		tb := t[0] | ||||
| 		if tb < utf8.RuneSelf { | ||||
| 			if sb != tb { | ||||
| 				sbUpper := sb & caseMask | ||||
| 				if 'A' <= sbUpper && sbUpper <= 'Z' { | ||||
| 					if sbUpper != tb&caseMask { | ||||
| 						return false | ||||
| 					} | ||||
| 				} else { | ||||
| 					return false | ||||
| 				} | ||||
| func appendFoldedName(out, in []byte) []byte { | ||||
| 	for i := 0; i < len(in); { | ||||
| 		// Handle single-byte ASCII. | ||||
| 		if c := in[i]; c < utf8.RuneSelf { | ||||
| 			if 'a' <= c && c <= 'z' { | ||||
| 				c -= 'a' - 'A' | ||||
| 			} | ||||
| 			t = t[1:] | ||||
| 			out = append(out, c) | ||||
| 			i++ | ||||
| 			continue | ||||
| 		} | ||||
| 		// sb is ASCII and t is not. t must be either kelvin | ||||
| 		// sign or long s; sb must be s, S, k, or K. | ||||
| 		tr, size := utf8.DecodeRune(t) | ||||
| 		switch sb { | ||||
| 		case 's', 'S': | ||||
| 			if tr != smallLongEss { | ||||
| 				return false | ||||
| 			} | ||||
| 		case 'k', 'K': | ||||
| 			if tr != kelvin { | ||||
| 				return false | ||||
| 			} | ||||
| 		default: | ||||
| 			return false | ||||
| 		} | ||||
| 		t = t[size:] | ||||
|  | ||||
| 		// Handle multi-byte Unicode. | ||||
| 		r, n := utf8.DecodeRune(in[i:]) | ||||
| 		out = utf8.AppendRune(out, foldRune(r)) | ||||
| 		i += n | ||||
| 	} | ||||
| 	if len(t) > 0 { | ||||
| 		return false | ||||
| 	} | ||||
| 	return true | ||||
| 	return out | ||||
| } | ||||
|  | ||||
| // asciiEqualFold is a specialization of bytes.EqualFold for use when | ||||
| // s is all ASCII (but may contain non-letters) and contains no | ||||
| // special-folding letters. | ||||
| // See comments on foldFunc. | ||||
| func asciiEqualFold(s, t []byte) bool { | ||||
| 	if len(s) != len(t) { | ||||
| 		return false | ||||
| 	} | ||||
| 	for i, sb := range s { | ||||
| 		tb := t[i] | ||||
| 		if sb == tb { | ||||
| 			continue | ||||
| 		} | ||||
| 		if ('a' <= sb && sb <= 'z') || ('A' <= sb && sb <= 'Z') { | ||||
| 			if sb&caseMask != tb&caseMask { | ||||
| 				return false | ||||
| 			} | ||||
| 		} else { | ||||
| 			return false | ||||
| // foldRune is returns the smallest rune for all runes in the same fold set. | ||||
| func foldRune(r rune) rune { | ||||
| 	for { | ||||
| 		r2 := unicode.SimpleFold(r) | ||||
| 		if r2 <= r { | ||||
| 			return r2 | ||||
| 		} | ||||
| 		r = r2 | ||||
| 	} | ||||
| 	return true | ||||
| } | ||||
|  | ||||
| // simpleLetterEqualFold is a specialization of bytes.EqualFold for | ||||
| // use when s is all ASCII letters (no underscores, etc) and also | ||||
| // doesn't contain 'k', 'K', 's', or 'S'. | ||||
| // See comments on foldFunc. | ||||
| func simpleLetterEqualFold(s, t []byte) bool { | ||||
| 	if len(s) != len(t) { | ||||
| 		return false | ||||
| 	} | ||||
| 	for i, b := range s { | ||||
| 		if b&caseMask != t[i]&caseMask { | ||||
| 			return false | ||||
| 		} | ||||
| 	} | ||||
| 	return true | ||||
| } | ||||
|   | ||||
| @@ -6,111 +6,45 @@ package json | ||||
|  | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"strings" | ||||
| 	"testing" | ||||
| 	"unicode/utf8" | ||||
| ) | ||||
|  | ||||
| var foldTests = []struct { | ||||
| 	fn   func(s, t []byte) bool | ||||
| 	s, t string | ||||
| 	want bool | ||||
| }{ | ||||
| 	{equalFoldRight, "", "", true}, | ||||
| 	{equalFoldRight, "a", "a", true}, | ||||
| 	{equalFoldRight, "", "a", false}, | ||||
| 	{equalFoldRight, "a", "", false}, | ||||
| 	{equalFoldRight, "a", "A", true}, | ||||
| 	{equalFoldRight, "AB", "ab", true}, | ||||
| 	{equalFoldRight, "AB", "ac", false}, | ||||
| 	{equalFoldRight, "sbkKc", "ſbKKc", true}, | ||||
| 	{equalFoldRight, "SbKkc", "ſbKKc", true}, | ||||
| 	{equalFoldRight, "SbKkc", "ſbKK", false}, | ||||
| 	{equalFoldRight, "e", "é", false}, | ||||
| 	{equalFoldRight, "s", "S", true}, | ||||
|  | ||||
| 	{simpleLetterEqualFold, "", "", true}, | ||||
| 	{simpleLetterEqualFold, "abc", "abc", true}, | ||||
| 	{simpleLetterEqualFold, "abc", "ABC", true}, | ||||
| 	{simpleLetterEqualFold, "abc", "ABCD", false}, | ||||
| 	{simpleLetterEqualFold, "abc", "xxx", false}, | ||||
|  | ||||
| 	{asciiEqualFold, "a_B", "A_b", true}, | ||||
| 	{asciiEqualFold, "aa@", "aa`", false}, // verify 0x40 and 0x60 aren't case-equivalent | ||||
| } | ||||
|  | ||||
| func TestFold(t *testing.T) { | ||||
| 	for i, tt := range foldTests { | ||||
| 		if got := tt.fn([]byte(tt.s), []byte(tt.t)); got != tt.want { | ||||
| 			t.Errorf("%d. %q, %q = %v; want %v", i, tt.s, tt.t, got, tt.want) | ||||
| func FuzzEqualFold(f *testing.F) { | ||||
| 	for _, ss := range [][2]string{ | ||||
| 		{"", ""}, | ||||
| 		{"123abc", "123ABC"}, | ||||
| 		{"αβδ", "ΑΒΔ"}, | ||||
| 		{"abc", "xyz"}, | ||||
| 		{"abc", "XYZ"}, | ||||
| 		{"1", "2"}, | ||||
| 		{"hello, world!", "hello, world!"}, | ||||
| 		{"hello, world!", "Hello, World!"}, | ||||
| 		{"hello, world!", "HELLO, WORLD!"}, | ||||
| 		{"hello, world!", "jello, world!"}, | ||||
| 		{"γειά, κόσμε!", "γειά, κόσμε!"}, | ||||
| 		{"γειά, κόσμε!", "Γειά, Κόσμε!"}, | ||||
| 		{"γειά, κόσμε!", "ΓΕΙΆ, ΚΌΣΜΕ!"}, | ||||
| 		{"γειά, κόσμε!", "ΛΕΙΆ, ΚΌΣΜΕ!"}, | ||||
| 		{"AESKey", "aesKey"}, | ||||
| 		{"AESKEY", "aes_key"}, | ||||
| 		{"aes_key", "AES_KEY"}, | ||||
| 		{"AES_KEY", "aes-key"}, | ||||
| 		{"aes-key", "AES-KEY"}, | ||||
| 		{"AES-KEY", "aesKey"}, | ||||
| 		{"aesKey", "AesKey"}, | ||||
| 		{"AesKey", "AESKey"}, | ||||
| 		{"AESKey", "aeskey"}, | ||||
| 		{"DESKey", "aeskey"}, | ||||
| 		{"AES Key", "aeskey"}, | ||||
| 	} { | ||||
| 		f.Add([]byte(ss[0]), []byte(ss[1])) | ||||
| 	} | ||||
| 	equalFold := func(x, y []byte) bool { return string(foldName(x)) == string(foldName(y)) } | ||||
| 	f.Fuzz(func(t *testing.T, x, y []byte) { | ||||
| 		got := equalFold(x, y) | ||||
| 		want := bytes.EqualFold(x, y) | ||||
| 		if got != want { | ||||
| 			t.Errorf("equalFold(%q, %q) = %v, want %v", x, y, got, want) | ||||
| 		} | ||||
| 		truth := strings.EqualFold(tt.s, tt.t) | ||||
| 		if truth != tt.want { | ||||
| 			t.Errorf("strings.EqualFold doesn't agree with case %d", i) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestFoldAgainstUnicode(t *testing.T) { | ||||
| 	const bufSize = 5 | ||||
| 	buf1 := make([]byte, 0, bufSize) | ||||
| 	buf2 := make([]byte, 0, bufSize) | ||||
| 	var runes []rune | ||||
| 	for i := 0x20; i <= 0x7f; i++ { | ||||
| 		runes = append(runes, rune(i)) | ||||
| 	} | ||||
| 	runes = append(runes, kelvin, smallLongEss) | ||||
|  | ||||
| 	funcs := []struct { | ||||
| 		name   string | ||||
| 		fold   func(s, t []byte) bool | ||||
| 		letter bool // must be ASCII letter | ||||
| 		simple bool // must be simple ASCII letter (not 'S' or 'K') | ||||
| 	}{ | ||||
| 		{ | ||||
| 			name: "equalFoldRight", | ||||
| 			fold: equalFoldRight, | ||||
| 		}, | ||||
| 		{ | ||||
| 			name:   "asciiEqualFold", | ||||
| 			fold:   asciiEqualFold, | ||||
| 			simple: true, | ||||
| 		}, | ||||
| 		{ | ||||
| 			name:   "simpleLetterEqualFold", | ||||
| 			fold:   simpleLetterEqualFold, | ||||
| 			simple: true, | ||||
| 			letter: true, | ||||
| 		}, | ||||
| 	} | ||||
|  | ||||
| 	for _, ff := range funcs { | ||||
| 		for _, r := range runes { | ||||
| 			if r >= utf8.RuneSelf { | ||||
| 				continue | ||||
| 			} | ||||
| 			if ff.letter && !isASCIILetter(byte(r)) { | ||||
| 				continue | ||||
| 			} | ||||
| 			if ff.simple && (r == 's' || r == 'S' || r == 'k' || r == 'K') { | ||||
| 				continue | ||||
| 			} | ||||
| 			for _, r2 := range runes { | ||||
| 				buf1 := append(buf1[:0], 'x') | ||||
| 				buf2 := append(buf2[:0], 'x') | ||||
| 				buf1 = buf1[:1+utf8.EncodeRune(buf1[1:bufSize], r)] | ||||
| 				buf2 = buf2[:1+utf8.EncodeRune(buf2[1:bufSize], r2)] | ||||
| 				buf1 = append(buf1, 'x') | ||||
| 				buf2 = append(buf2, 'x') | ||||
| 				want := bytes.EqualFold(buf1, buf2) | ||||
| 				if got := ff.fold(buf1, buf2); got != want { | ||||
| 					t.Errorf("%s(%q, %q) = %v; want %v", ff.name, buf1, buf2, got, want) | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func isASCIILetter(b byte) bool { | ||||
| 	return ('A' <= b && b <= 'Z') || ('a' <= b && b <= 'z') | ||||
| 	}) | ||||
| } | ||||
|   | ||||
| @@ -1,42 +0,0 @@ | ||||
| // Copyright 2019 The Go Authors. All rights reserved. | ||||
| // Use of this source code is governed by a BSD-style | ||||
| // license that can be found in the LICENSE file. | ||||
|  | ||||
| //go:build gofuzz | ||||
|  | ||||
| package json | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| ) | ||||
|  | ||||
| func Fuzz(data []byte) (score int) { | ||||
| 	for _, ctor := range []func() any{ | ||||
| 		func() any { return new(any) }, | ||||
| 		func() any { return new(map[string]any) }, | ||||
| 		func() any { return new([]any) }, | ||||
| 	} { | ||||
| 		v := ctor() | ||||
| 		err := Unmarshal(data, v) | ||||
| 		if err != nil { | ||||
| 			continue | ||||
| 		} | ||||
| 		score = 1 | ||||
|  | ||||
| 		m, err := Marshal(v) | ||||
| 		if err != nil { | ||||
| 			fmt.Printf("v=%#v\n", v) | ||||
| 			panic(err) | ||||
| 		} | ||||
|  | ||||
| 		u := ctor() | ||||
| 		err = Unmarshal(m, u) | ||||
| 		if err != nil { | ||||
| 			fmt.Printf("v=%#v\n", v) | ||||
| 			fmt.Printf("m=%s\n", m) | ||||
| 			panic(err) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return | ||||
| } | ||||
| @@ -25,7 +25,6 @@ func (r GoJsonRender) Render(w http.ResponseWriter) error { | ||||
| 	if val := header["Content-Type"]; len(val) == 0 { | ||||
| 		header["Content-Type"] = []string{"application/json; charset=utf-8"} | ||||
| 	} | ||||
|  | ||||
| 	jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent, r.Filter) | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
|   | ||||
							
								
								
									
										119
									
								
								gojson/indent.go
									
									
									
									
									
								
							
							
						
						
									
										119
									
								
								gojson/indent.go
									
									
									
									
									
								
							| @@ -4,38 +4,67 @@ | ||||
|  | ||||
| package json | ||||
|  | ||||
| import ( | ||||
| 	"bytes" | ||||
| ) | ||||
| import "bytes" | ||||
|  | ||||
| // HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029 | ||||
| // characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029 | ||||
| // so that the JSON will be safe to embed inside HTML <script> tags. | ||||
| // For historical reasons, web browsers don't honor standard HTML | ||||
| // escaping within <script> tags, so an alternative JSON encoding must be used. | ||||
| func HTMLEscape(dst *bytes.Buffer, src []byte) { | ||||
| 	dst.Grow(len(src)) | ||||
| 	dst.Write(appendHTMLEscape(dst.AvailableBuffer(), src)) | ||||
| } | ||||
|  | ||||
| func appendHTMLEscape(dst, src []byte) []byte { | ||||
| 	// The characters can only appear in string literals, | ||||
| 	// so just scan the string one byte at a time. | ||||
| 	start := 0 | ||||
| 	for i, c := range src { | ||||
| 		if c == '<' || c == '>' || c == '&' { | ||||
| 			dst = append(dst, src[start:i]...) | ||||
| 			dst = append(dst, '\\', 'u', '0', '0', hex[c>>4], hex[c&0xF]) | ||||
| 			start = i + 1 | ||||
| 		} | ||||
| 		// Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9). | ||||
| 		if c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 { | ||||
| 			dst = append(dst, src[start:i]...) | ||||
| 			dst = append(dst, '\\', 'u', '2', '0', '2', hex[src[i+2]&0xF]) | ||||
| 			start = i + len("\u2029") | ||||
| 		} | ||||
| 	} | ||||
| 	return append(dst, src[start:]...) | ||||
| } | ||||
|  | ||||
| // Compact appends to dst the JSON-encoded src with | ||||
| // insignificant space characters elided. | ||||
| func Compact(dst *bytes.Buffer, src []byte) error { | ||||
| 	return compact(dst, src, false) | ||||
| 	dst.Grow(len(src)) | ||||
| 	b := dst.AvailableBuffer() | ||||
| 	b, err := appendCompact(b, src, false) | ||||
| 	dst.Write(b) | ||||
| 	return err | ||||
| } | ||||
|  | ||||
| func compact(dst *bytes.Buffer, src []byte, escape bool) error { | ||||
| 	origLen := dst.Len() | ||||
| func appendCompact(dst, src []byte, escape bool) ([]byte, error) { | ||||
| 	origLen := len(dst) | ||||
| 	scan := newScanner() | ||||
| 	defer freeScanner(scan) | ||||
| 	start := 0 | ||||
| 	for i, c := range src { | ||||
| 		if escape && (c == '<' || c == '>' || c == '&') { | ||||
| 			if start < i { | ||||
| 				dst.Write(src[start:i]) | ||||
| 				dst = append(dst, src[start:i]...) | ||||
| 			} | ||||
| 			dst.WriteString(`\u00`) | ||||
| 			dst.WriteByte(hex[c>>4]) | ||||
| 			dst.WriteByte(hex[c&0xF]) | ||||
| 			dst = append(dst, '\\', 'u', '0', '0', hex[c>>4], hex[c&0xF]) | ||||
| 			start = i + 1 | ||||
| 		} | ||||
| 		// Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9). | ||||
| 		if escape && c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 { | ||||
| 			if start < i { | ||||
| 				dst.Write(src[start:i]) | ||||
| 				dst = append(dst, src[start:i]...) | ||||
| 			} | ||||
| 			dst.WriteString(`\u202`) | ||||
| 			dst.WriteByte(hex[src[i+2]&0xF]) | ||||
| 			dst = append(dst, '\\', 'u', '2', '0', '2', hex[src[i+2]&0xF]) | ||||
| 			start = i + 3 | ||||
| 		} | ||||
| 		v := scan.step(scan, c) | ||||
| @@ -44,29 +73,37 @@ func compact(dst *bytes.Buffer, src []byte, escape bool) error { | ||||
| 				break | ||||
| 			} | ||||
| 			if start < i { | ||||
| 				dst.Write(src[start:i]) | ||||
| 				dst = append(dst, src[start:i]...) | ||||
| 			} | ||||
| 			start = i + 1 | ||||
| 		} | ||||
| 	} | ||||
| 	if scan.eof() == scanError { | ||||
| 		dst.Truncate(origLen) | ||||
| 		return scan.err | ||||
| 		return dst[:origLen], scan.err | ||||
| 	} | ||||
| 	if start < len(src) { | ||||
| 		dst.Write(src[start:]) | ||||
| 		dst = append(dst, src[start:]...) | ||||
| 	} | ||||
| 	return nil | ||||
| 	return dst, nil | ||||
| } | ||||
|  | ||||
| func newline(dst *bytes.Buffer, prefix, indent string, depth int) { | ||||
| 	dst.WriteByte('\n') | ||||
| 	dst.WriteString(prefix) | ||||
| func appendNewline(dst []byte, prefix, indent string, depth int) []byte { | ||||
| 	dst = append(dst, '\n') | ||||
| 	dst = append(dst, prefix...) | ||||
| 	for i := 0; i < depth; i++ { | ||||
| 		dst.WriteString(indent) | ||||
| 		dst = append(dst, indent...) | ||||
| 	} | ||||
| 	return dst | ||||
| } | ||||
|  | ||||
| // indentGrowthFactor specifies the growth factor of indenting JSON input. | ||||
| // Empirically, the growth factor was measured to be between 1.4x to 1.8x | ||||
| // for some set of compacted JSON with the indent being a single tab. | ||||
| // Specify a growth factor slightly larger than what is observed | ||||
| // to reduce probability of allocation in appendIndent. | ||||
| // A factor no higher than 2 ensures that wasted space never exceeds 50%. | ||||
| const indentGrowthFactor = 2 | ||||
|  | ||||
| // Indent appends to dst an indented form of the JSON-encoded src. | ||||
| // Each element in a JSON object or array begins on a new, | ||||
| // indented line beginning with prefix followed by one or more | ||||
| @@ -79,7 +116,15 @@ func newline(dst *bytes.Buffer, prefix, indent string, depth int) { | ||||
| // For example, if src has no trailing spaces, neither will dst; | ||||
| // if src ends in a trailing newline, so will dst. | ||||
| func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error { | ||||
| 	origLen := dst.Len() | ||||
| 	dst.Grow(indentGrowthFactor * len(src)) | ||||
| 	b := dst.AvailableBuffer() | ||||
| 	b, err := appendIndent(b, src, prefix, indent) | ||||
| 	dst.Write(b) | ||||
| 	return err | ||||
| } | ||||
|  | ||||
| func appendIndent(dst, src []byte, prefix, indent string) ([]byte, error) { | ||||
| 	origLen := len(dst) | ||||
| 	scan := newScanner() | ||||
| 	defer freeScanner(scan) | ||||
| 	needIndent := false | ||||
| @@ -96,13 +141,13 @@ func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error { | ||||
| 		if needIndent && v != scanEndObject && v != scanEndArray { | ||||
| 			needIndent = false | ||||
| 			depth++ | ||||
| 			newline(dst, prefix, indent, depth) | ||||
| 			dst = appendNewline(dst, prefix, indent, depth) | ||||
| 		} | ||||
|  | ||||
| 		// Emit semantically uninteresting bytes | ||||
| 		// (in particular, punctuation in strings) unmodified. | ||||
| 		if v == scanContinue { | ||||
| 			dst.WriteByte(c) | ||||
| 			dst = append(dst, c) | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| @@ -111,33 +156,27 @@ func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error { | ||||
| 		case '{', '[': | ||||
| 			// delay indent so that empty object and array are formatted as {} and []. | ||||
| 			needIndent = true | ||||
| 			dst.WriteByte(c) | ||||
|  | ||||
| 			dst = append(dst, c) | ||||
| 		case ',': | ||||
| 			dst.WriteByte(c) | ||||
| 			newline(dst, prefix, indent, depth) | ||||
|  | ||||
| 			dst = append(dst, c) | ||||
| 			dst = appendNewline(dst, prefix, indent, depth) | ||||
| 		case ':': | ||||
| 			dst.WriteByte(c) | ||||
| 			dst.WriteByte(' ') | ||||
|  | ||||
| 			dst = append(dst, c, ' ') | ||||
| 		case '}', ']': | ||||
| 			if needIndent { | ||||
| 				// suppress indent in empty object/array | ||||
| 				needIndent = false | ||||
| 			} else { | ||||
| 				depth-- | ||||
| 				newline(dst, prefix, indent, depth) | ||||
| 				dst = appendNewline(dst, prefix, indent, depth) | ||||
| 			} | ||||
| 			dst.WriteByte(c) | ||||
|  | ||||
| 			dst = append(dst, c) | ||||
| 		default: | ||||
| 			dst.WriteByte(c) | ||||
| 			dst = append(dst, c) | ||||
| 		} | ||||
| 	} | ||||
| 	if scan.eof() == scanError { | ||||
| 		dst.Truncate(origLen) | ||||
| 		return scan.err | ||||
| 		return dst[:origLen], scan.err | ||||
| 	} | ||||
| 	return nil | ||||
| 	return dst, nil | ||||
| } | ||||
|   | ||||
| @@ -116,18 +116,3 @@ func TestNumberIsValid(t *testing.T) { | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func BenchmarkNumberIsValid(b *testing.B) { | ||||
| 	s := "-61657.61667E+61673" | ||||
| 	for i := 0; i < b.N; i++ { | ||||
| 		isValidNumber(s) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func BenchmarkNumberIsValidRegexp(b *testing.B) { | ||||
| 	var jsonNumberRegexp = regexp.MustCompile(`^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$`) | ||||
| 	s := "-61657.61667E+61673" | ||||
| 	for i := 0; i < b.N; i++ { | ||||
| 		jsonNumberRegexp.MatchString(s) | ||||
| 	} | ||||
| } | ||||
|   | ||||
| @@ -43,7 +43,7 @@ func checkValid(data []byte, scan *scanner) error { | ||||
| } | ||||
|  | ||||
| // A SyntaxError is a description of a JSON syntax error. | ||||
| // Unmarshal will return a SyntaxError if the JSON can't be parsed. | ||||
| // [Unmarshal] will return a SyntaxError if the JSON can't be parsed. | ||||
| type SyntaxError struct { | ||||
| 	msg    string // description of error | ||||
| 	Offset int64  // error occurred after reading Offset bytes | ||||
| @@ -594,7 +594,7 @@ func (s *scanner) error(c byte, context string) int { | ||||
| 	return scanError | ||||
| } | ||||
|  | ||||
| // quoteChar formats c as a quoted character literal | ||||
| // quoteChar formats c as a quoted character literal. | ||||
| func quoteChar(c byte) string { | ||||
| 	// special cases - different from quoted strings | ||||
| 	if c == '\'' { | ||||
|   | ||||
| @@ -9,51 +9,59 @@ import ( | ||||
| 	"math" | ||||
| 	"math/rand" | ||||
| 	"reflect" | ||||
| 	"strings" | ||||
| 	"testing" | ||||
| ) | ||||
|  | ||||
| var validTests = []struct { | ||||
| 	data string | ||||
| 	ok   bool | ||||
| }{ | ||||
| 	{`foo`, false}, | ||||
| 	{`}{`, false}, | ||||
| 	{`{]`, false}, | ||||
| 	{`{}`, true}, | ||||
| 	{`{"foo":"bar"}`, true}, | ||||
| 	{`{"foo":"bar","bar":{"baz":["qux"]}}`, true}, | ||||
| func indentNewlines(s string) string { | ||||
| 	return strings.Join(strings.Split(s, "\n"), "\n\t") | ||||
| } | ||||
|  | ||||
| func stripWhitespace(s string) string { | ||||
| 	return strings.Map(func(r rune) rune { | ||||
| 		if r == ' ' || r == '\n' || r == '\r' || r == '\t' { | ||||
| 			return -1 | ||||
| 		} | ||||
| 		return r | ||||
| 	}, s) | ||||
| } | ||||
|  | ||||
| func TestValid(t *testing.T) { | ||||
| 	for _, tt := range validTests { | ||||
| 		if ok := Valid([]byte(tt.data)); ok != tt.ok { | ||||
| 			t.Errorf("Valid(%#q) = %v, want %v", tt.data, ok, tt.ok) | ||||
| 		} | ||||
| 	tests := []struct { | ||||
| 		CaseName | ||||
| 		data string | ||||
| 		ok   bool | ||||
| 	}{ | ||||
| 		{Name(""), `foo`, false}, | ||||
| 		{Name(""), `}{`, false}, | ||||
| 		{Name(""), `{]`, false}, | ||||
| 		{Name(""), `{}`, true}, | ||||
| 		{Name(""), `{"foo":"bar"}`, true}, | ||||
| 		{Name(""), `{"foo":"bar","bar":{"baz":["qux"]}}`, true}, | ||||
| 	} | ||||
| 	for _, tt := range tests { | ||||
| 		t.Run(tt.Name, func(t *testing.T) { | ||||
| 			if ok := Valid([]byte(tt.data)); ok != tt.ok { | ||||
| 				t.Errorf("%s: Valid(`%s`) = %v, want %v", tt.Where, tt.data, ok, tt.ok) | ||||
| 			} | ||||
| 		}) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // Tests of simple examples. | ||||
|  | ||||
| type example struct { | ||||
| 	compact string | ||||
| 	indent  string | ||||
| } | ||||
|  | ||||
| var examples = []example{ | ||||
| 	{`1`, `1`}, | ||||
| 	{`{}`, `{}`}, | ||||
| 	{`[]`, `[]`}, | ||||
| 	{`{"":2}`, "{\n\t\"\": 2\n}"}, | ||||
| 	{`[3]`, "[\n\t3\n]"}, | ||||
| 	{`[1,2,3]`, "[\n\t1,\n\t2,\n\t3\n]"}, | ||||
| 	{`{"x":1}`, "{\n\t\"x\": 1\n}"}, | ||||
| 	{ex1, ex1i}, | ||||
| 	{"{\"\":\"<>&\u2028\u2029\"}", "{\n\t\"\": \"<>&\u2028\u2029\"\n}"}, // See golang.org/issue/34070 | ||||
| } | ||||
|  | ||||
| var ex1 = `[true,false,null,"x",1,1.5,0,-5e+2]` | ||||
|  | ||||
| var ex1i = `[ | ||||
| func TestCompactAndIndent(t *testing.T) { | ||||
| 	tests := []struct { | ||||
| 		CaseName | ||||
| 		compact string | ||||
| 		indent  string | ||||
| 	}{ | ||||
| 		{Name(""), `1`, `1`}, | ||||
| 		{Name(""), `{}`, `{}`}, | ||||
| 		{Name(""), `[]`, `[]`}, | ||||
| 		{Name(""), `{"":2}`, "{\n\t\"\": 2\n}"}, | ||||
| 		{Name(""), `[3]`, "[\n\t3\n]"}, | ||||
| 		{Name(""), `[1,2,3]`, "[\n\t1,\n\t2,\n\t3\n]"}, | ||||
| 		{Name(""), `{"x":1}`, "{\n\t\"x\": 1\n}"}, | ||||
| 		{Name(""), `[true,false,null,"x",1,1.5,0,-5e+2]`, `[ | ||||
| 	true, | ||||
| 	false, | ||||
| 	null, | ||||
| @@ -62,25 +70,40 @@ var ex1i = `[ | ||||
| 	1.5, | ||||
| 	0, | ||||
| 	-5e+2 | ||||
| ]` | ||||
|  | ||||
| func TestCompact(t *testing.T) { | ||||
| ]`}, | ||||
| 		{Name(""), "{\"\":\"<>&\u2028\u2029\"}", "{\n\t\"\": \"<>&\u2028\u2029\"\n}"}, // See golang.org/issue/34070 | ||||
| 	} | ||||
| 	var buf bytes.Buffer | ||||
| 	for _, tt := range examples { | ||||
| 		buf.Reset() | ||||
| 		if err := Compact(&buf, []byte(tt.compact)); err != nil { | ||||
| 			t.Errorf("Compact(%#q): %v", tt.compact, err) | ||||
| 		} else if s := buf.String(); s != tt.compact { | ||||
| 			t.Errorf("Compact(%#q) = %#q, want original", tt.compact, s) | ||||
| 		} | ||||
| 	for _, tt := range tests { | ||||
| 		t.Run(tt.Name, func(t *testing.T) { | ||||
| 			buf.Reset() | ||||
| 			if err := Compact(&buf, []byte(tt.compact)); err != nil { | ||||
| 				t.Errorf("%s: Compact error: %v", tt.Where, err) | ||||
| 			} else if got := buf.String(); got != tt.compact { | ||||
| 				t.Errorf("%s: Compact:\n\tgot:  %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.compact)) | ||||
| 			} | ||||
|  | ||||
| 		buf.Reset() | ||||
| 		if err := Compact(&buf, []byte(tt.indent)); err != nil { | ||||
| 			t.Errorf("Compact(%#q): %v", tt.indent, err) | ||||
| 			continue | ||||
| 		} else if s := buf.String(); s != tt.compact { | ||||
| 			t.Errorf("Compact(%#q) = %#q, want %#q", tt.indent, s, tt.compact) | ||||
| 		} | ||||
| 			buf.Reset() | ||||
| 			if err := Compact(&buf, []byte(tt.indent)); err != nil { | ||||
| 				t.Errorf("%s: Compact error: %v", tt.Where, err) | ||||
| 			} else if got := buf.String(); got != tt.compact { | ||||
| 				t.Errorf("%s: Compact:\n\tgot:  %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.compact)) | ||||
| 			} | ||||
|  | ||||
| 			buf.Reset() | ||||
| 			if err := Indent(&buf, []byte(tt.indent), "", "\t"); err != nil { | ||||
| 				t.Errorf("%s: Indent error: %v", tt.Where, err) | ||||
| 			} else if got := buf.String(); got != tt.indent { | ||||
| 				t.Errorf("%s: Compact:\n\tgot:  %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.indent)) | ||||
| 			} | ||||
|  | ||||
| 			buf.Reset() | ||||
| 			if err := Indent(&buf, []byte(tt.compact), "", "\t"); err != nil { | ||||
| 				t.Errorf("%s: Indent error: %v", tt.Where, err) | ||||
| 			} else if got := buf.String(); got != tt.indent { | ||||
| 				t.Errorf("%s: Compact:\n\tgot:  %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.indent)) | ||||
| 			} | ||||
| 		}) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| @@ -88,38 +111,21 @@ func TestCompactSeparators(t *testing.T) { | ||||
| 	// U+2028 and U+2029 should be escaped inside strings. | ||||
| 	// They should not appear outside strings. | ||||
| 	tests := []struct { | ||||
| 		CaseName | ||||
| 		in, compact string | ||||
| 	}{ | ||||
| 		{"{\"\u2028\": 1}", "{\"\u2028\":1}"}, | ||||
| 		{"{\"\u2029\" :2}", "{\"\u2029\":2}"}, | ||||
| 		{Name(""), "{\"\u2028\": 1}", "{\"\u2028\":1}"}, | ||||
| 		{Name(""), "{\"\u2029\" :2}", "{\"\u2029\":2}"}, | ||||
| 	} | ||||
| 	for _, tt := range tests { | ||||
| 		var buf bytes.Buffer | ||||
| 		if err := Compact(&buf, []byte(tt.in)); err != nil { | ||||
| 			t.Errorf("Compact(%q): %v", tt.in, err) | ||||
| 		} else if s := buf.String(); s != tt.compact { | ||||
| 			t.Errorf("Compact(%q) = %q, want %q", tt.in, s, tt.compact) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestIndent(t *testing.T) { | ||||
| 	var buf bytes.Buffer | ||||
| 	for _, tt := range examples { | ||||
| 		buf.Reset() | ||||
| 		if err := Indent(&buf, []byte(tt.indent), "", "\t"); err != nil { | ||||
| 			t.Errorf("Indent(%#q): %v", tt.indent, err) | ||||
| 		} else if s := buf.String(); s != tt.indent { | ||||
| 			t.Errorf("Indent(%#q) = %#q, want original", tt.indent, s) | ||||
| 		} | ||||
|  | ||||
| 		buf.Reset() | ||||
| 		if err := Indent(&buf, []byte(tt.compact), "", "\t"); err != nil { | ||||
| 			t.Errorf("Indent(%#q): %v", tt.compact, err) | ||||
| 			continue | ||||
| 		} else if s := buf.String(); s != tt.indent { | ||||
| 			t.Errorf("Indent(%#q) = %#q, want %#q", tt.compact, s, tt.indent) | ||||
| 		} | ||||
| 		t.Run(tt.Name, func(t *testing.T) { | ||||
| 			var buf bytes.Buffer | ||||
| 			if err := Compact(&buf, []byte(tt.in)); err != nil { | ||||
| 				t.Errorf("%s: Compact error: %v", tt.Where, err) | ||||
| 			} else if got := buf.String(); got != tt.compact { | ||||
| 				t.Errorf("%s: Compact:\n\tgot:  %s\n\twant: %s", tt.Where, indentNewlines(got), indentNewlines(tt.compact)) | ||||
| 			} | ||||
| 		}) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| @@ -129,11 +135,11 @@ func TestCompactBig(t *testing.T) { | ||||
| 	initBig() | ||||
| 	var buf bytes.Buffer | ||||
| 	if err := Compact(&buf, jsonBig); err != nil { | ||||
| 		t.Fatalf("Compact: %v", err) | ||||
| 		t.Fatalf("Compact error: %v", err) | ||||
| 	} | ||||
| 	b := buf.Bytes() | ||||
| 	if !bytes.Equal(b, jsonBig) { | ||||
| 		t.Error("Compact(jsonBig) != jsonBig") | ||||
| 		t.Error("Compact:") | ||||
| 		diff(t, b, jsonBig) | ||||
| 		return | ||||
| 	} | ||||
| @@ -144,23 +150,23 @@ func TestIndentBig(t *testing.T) { | ||||
| 	initBig() | ||||
| 	var buf bytes.Buffer | ||||
| 	if err := Indent(&buf, jsonBig, "", "\t"); err != nil { | ||||
| 		t.Fatalf("Indent1: %v", err) | ||||
| 		t.Fatalf("Indent error: %v", err) | ||||
| 	} | ||||
| 	b := buf.Bytes() | ||||
| 	if len(b) == len(jsonBig) { | ||||
| 		// jsonBig is compact (no unnecessary spaces); | ||||
| 		// indenting should make it bigger | ||||
| 		t.Fatalf("Indent(jsonBig) did not get bigger") | ||||
| 		t.Fatalf("Indent did not expand the input") | ||||
| 	} | ||||
|  | ||||
| 	// should be idempotent | ||||
| 	var buf1 bytes.Buffer | ||||
| 	if err := Indent(&buf1, b, "", "\t"); err != nil { | ||||
| 		t.Fatalf("Indent2: %v", err) | ||||
| 		t.Fatalf("Indent error: %v", err) | ||||
| 	} | ||||
| 	b1 := buf1.Bytes() | ||||
| 	if !bytes.Equal(b1, b) { | ||||
| 		t.Error("Indent(Indent(jsonBig)) != Indent(jsonBig)") | ||||
| 		t.Error("Indent(Indent(jsonBig)) != Indent(jsonBig):") | ||||
| 		diff(t, b1, b) | ||||
| 		return | ||||
| 	} | ||||
| @@ -168,40 +174,40 @@ func TestIndentBig(t *testing.T) { | ||||
| 	// should get back to original | ||||
| 	buf1.Reset() | ||||
| 	if err := Compact(&buf1, b); err != nil { | ||||
| 		t.Fatalf("Compact: %v", err) | ||||
| 		t.Fatalf("Compact error: %v", err) | ||||
| 	} | ||||
| 	b1 = buf1.Bytes() | ||||
| 	if !bytes.Equal(b1, jsonBig) { | ||||
| 		t.Error("Compact(Indent(jsonBig)) != jsonBig") | ||||
| 		t.Error("Compact(Indent(jsonBig)) != jsonBig:") | ||||
| 		diff(t, b1, jsonBig) | ||||
| 		return | ||||
| 	} | ||||
| } | ||||
|  | ||||
| type indentErrorTest struct { | ||||
| 	in  string | ||||
| 	err error | ||||
| } | ||||
|  | ||||
| var indentErrorTests = []indentErrorTest{ | ||||
| 	{`{"X": "foo", "Y"}`, &SyntaxError{"invalid character '}' after object key", 17}}, | ||||
| 	{`{"X": "foo" "Y": "bar"}`, &SyntaxError{"invalid character '\"' after object key:value pair", 13}}, | ||||
| } | ||||
|  | ||||
| func TestIndentErrors(t *testing.T) { | ||||
| 	for i, tt := range indentErrorTests { | ||||
| 		slice := make([]uint8, 0) | ||||
| 		buf := bytes.NewBuffer(slice) | ||||
| 		if err := Indent(buf, []uint8(tt.in), "", ""); err != nil { | ||||
| 			if !reflect.DeepEqual(err, tt.err) { | ||||
| 				t.Errorf("#%d: Indent: %#v", i, err) | ||||
| 				continue | ||||
| 	tests := []struct { | ||||
| 		CaseName | ||||
| 		in  string | ||||
| 		err error | ||||
| 	}{ | ||||
| 		{Name(""), `{"X": "foo", "Y"}`, &SyntaxError{"invalid character '}' after object key", 17}}, | ||||
| 		{Name(""), `{"X": "foo" "Y": "bar"}`, &SyntaxError{"invalid character '\"' after object key:value pair", 13}}, | ||||
| 	} | ||||
| 	for _, tt := range tests { | ||||
| 		t.Run(tt.Name, func(t *testing.T) { | ||||
| 			slice := make([]uint8, 0) | ||||
| 			buf := bytes.NewBuffer(slice) | ||||
| 			if err := Indent(buf, []uint8(tt.in), "", ""); err != nil { | ||||
| 				if !reflect.DeepEqual(err, tt.err) { | ||||
| 					t.Fatalf("%s: Indent error:\n\tgot:  %v\n\twant: %v", tt.Where, err, tt.err) | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
| 		}) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func diff(t *testing.T, a, b []byte) { | ||||
| 	t.Helper() | ||||
| 	for i := 0; ; i++ { | ||||
| 		if i >= len(a) || i >= len(b) || a[i] != b[i] { | ||||
| 			j := i - 10 | ||||
| @@ -215,10 +221,7 @@ func diff(t *testing.T, a, b []byte) { | ||||
| } | ||||
|  | ||||
| func trim(b []byte) []byte { | ||||
| 	if len(b) > 20 { | ||||
| 		return b[0:20] | ||||
| 	} | ||||
| 	return b | ||||
| 	return b[:min(len(b), 20)] | ||||
| } | ||||
|  | ||||
| // Generate a random JSON object. | ||||
|   | ||||
| @@ -33,7 +33,7 @@ func NewDecoder(r io.Reader) *Decoder { | ||||
| } | ||||
|  | ||||
| // UseNumber causes the Decoder to unmarshal a number into an interface{} as a | ||||
| // Number instead of as a float64. | ||||
| // [Number] instead of as a float64. | ||||
| func (dec *Decoder) UseNumber() { dec.d.useNumber = true } | ||||
|  | ||||
| // DisallowUnknownFields causes the Decoder to return an error when the destination | ||||
| @@ -47,7 +47,7 @@ func (dec *Decoder) TagKey(v string) { dec.d.tagkey = &v } | ||||
| // Decode reads the next JSON-encoded value from its | ||||
| // input and stores it in the value pointed to by v. | ||||
| // | ||||
| // See the documentation for Unmarshal for details about | ||||
| // See the documentation for [Unmarshal] for details about | ||||
| // the conversion of JSON into a Go value. | ||||
| func (dec *Decoder) Decode(v any) error { | ||||
| 	if dec.err != nil { | ||||
| @@ -82,7 +82,7 @@ func (dec *Decoder) Decode(v any) error { | ||||
| } | ||||
|  | ||||
| // Buffered returns a reader of the data remaining in the Decoder's | ||||
| // buffer. The reader is valid until the next call to Decode. | ||||
| // buffer. The reader is valid until the next call to [Decoder.Decode]. | ||||
| func (dec *Decoder) Buffered() io.Reader { | ||||
| 	return bytes.NewReader(dec.buf[dec.scanp:]) | ||||
| } | ||||
| @@ -186,7 +186,7 @@ type Encoder struct { | ||||
| 	err        error | ||||
| 	escapeHTML bool | ||||
|  | ||||
| 	indentBuf    *bytes.Buffer | ||||
| 	indentBuf    []byte | ||||
| 	indentPrefix string | ||||
| 	indentValue  string | ||||
| } | ||||
| @@ -197,15 +197,19 @@ func NewEncoder(w io.Writer) *Encoder { | ||||
| } | ||||
|  | ||||
| // Encode writes the JSON encoding of v to the stream, | ||||
| // with insignificant space characters elided, | ||||
| // followed by a newline character. | ||||
| // | ||||
| // See the documentation for Marshal for details about the | ||||
| // See the documentation for [Marshal] for details about the | ||||
| // conversion of Go values to JSON. | ||||
| func (enc *Encoder) Encode(v any) error { | ||||
| 	if enc.err != nil { | ||||
| 		return enc.err | ||||
| 	} | ||||
|  | ||||
| 	e := newEncodeState() | ||||
| 	defer encodeStatePool.Put(e) | ||||
|  | ||||
| 	err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML}) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| @@ -221,20 +225,15 @@ func (enc *Encoder) Encode(v any) error { | ||||
|  | ||||
| 	b := e.Bytes() | ||||
| 	if enc.indentPrefix != "" || enc.indentValue != "" { | ||||
| 		if enc.indentBuf == nil { | ||||
| 			enc.indentBuf = new(bytes.Buffer) | ||||
| 		} | ||||
| 		enc.indentBuf.Reset() | ||||
| 		err = Indent(enc.indentBuf, b, enc.indentPrefix, enc.indentValue) | ||||
| 		enc.indentBuf, err = appendIndent(enc.indentBuf[:0], b, enc.indentPrefix, enc.indentValue) | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		b = enc.indentBuf.Bytes() | ||||
| 		b = enc.indentBuf | ||||
| 	} | ||||
| 	if _, err = enc.w.Write(b); err != nil { | ||||
| 		enc.err = err | ||||
| 	} | ||||
| 	encodeStatePool.Put(e) | ||||
| 	return err | ||||
| } | ||||
|  | ||||
| @@ -258,7 +257,7 @@ func (enc *Encoder) SetEscapeHTML(on bool) { | ||||
| } | ||||
|  | ||||
| // RawMessage is a raw encoded JSON value. | ||||
| // It implements Marshaler and Unmarshaler and can | ||||
| // It implements [Marshaler] and [Unmarshaler] and can | ||||
| // be used to delay JSON decoding or precompute a JSON encoding. | ||||
| type RawMessage []byte | ||||
|  | ||||
| @@ -284,12 +283,12 @@ var _ Unmarshaler = (*RawMessage)(nil) | ||||
|  | ||||
| // A Token holds a value of one of these types: | ||||
| // | ||||
| //	Delim, for the four JSON delimiters [ ] { } | ||||
| //	bool, for JSON booleans | ||||
| //	float64, for JSON numbers | ||||
| //	Number, for JSON numbers | ||||
| //	string, for JSON string literals | ||||
| //	nil, for JSON null | ||||
| //   - [Delim], for the four JSON delimiters [ ] { } | ||||
| //   - bool, for JSON booleans | ||||
| //   - float64, for JSON numbers | ||||
| //   - [Number], for JSON numbers | ||||
| //   - string, for JSON string literals | ||||
| //   - nil, for JSON null | ||||
| type Token any | ||||
|  | ||||
| const ( | ||||
| @@ -359,14 +358,14 @@ func (d Delim) String() string { | ||||
| } | ||||
|  | ||||
| // Token returns the next JSON token in the input stream. | ||||
| // At the end of the input stream, Token returns nil, io.EOF. | ||||
| // At the end of the input stream, Token returns nil, [io.EOF]. | ||||
| // | ||||
| // Token guarantees that the delimiters [ ] { } it returns are | ||||
| // properly nested and matched: if Token encounters an unexpected | ||||
| // delimiter in the input, it will return an error. | ||||
| // | ||||
| // The input stream consists of basic JSON values—bool, string, | ||||
| // number, and null—along with delimiters [ ] { } of type Delim | ||||
| // number, and null—along with delimiters [ ] { } of type [Delim] | ||||
| // to mark the start and end of arrays and objects. | ||||
| // Commas and colons are elided. | ||||
| func (dec *Decoder) Token() (Token, error) { | ||||
|   | ||||
| @@ -6,16 +6,44 @@ package json | ||||
|  | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"fmt" | ||||
| 	"io" | ||||
| 	"log" | ||||
| 	"net" | ||||
| 	"net/http" | ||||
| 	"net/http/httptest" | ||||
| 	"path" | ||||
| 	"reflect" | ||||
| 	"runtime" | ||||
| 	"runtime/debug" | ||||
| 	"strings" | ||||
| 	"testing" | ||||
| ) | ||||
|  | ||||
| // TODO(https://go.dev/issue/52751): Replace with native testing support. | ||||
|  | ||||
| // CaseName is a case name annotated with a file and line. | ||||
| type CaseName struct { | ||||
| 	Name  string | ||||
| 	Where CasePos | ||||
| } | ||||
|  | ||||
| // Name annotates a case name with the file and line of the caller. | ||||
| func Name(s string) (c CaseName) { | ||||
| 	c.Name = s | ||||
| 	runtime.Callers(2, c.Where.pc[:]) | ||||
| 	return c | ||||
| } | ||||
|  | ||||
| // CasePos represents a file and line number. | ||||
| type CasePos struct{ pc [1]uintptr } | ||||
|  | ||||
| func (pos CasePos) String() string { | ||||
| 	frames := runtime.CallersFrames(pos.pc[:]) | ||||
| 	frame, _ := frames.Next() | ||||
| 	return fmt.Sprintf("%s:%d", path.Base(frame.File), frame.Line) | ||||
| } | ||||
|  | ||||
| // Test values for the stream test. | ||||
| // One of each JSON kind. | ||||
| var streamTest = []any{ | ||||
| @@ -41,24 +69,61 @@ false | ||||
|  | ||||
| func TestEncoder(t *testing.T) { | ||||
| 	for i := 0; i <= len(streamTest); i++ { | ||||
| 		var buf bytes.Buffer | ||||
| 		var buf strings.Builder | ||||
| 		enc := NewEncoder(&buf) | ||||
| 		// Check that enc.SetIndent("", "") turns off indentation. | ||||
| 		enc.SetIndent(">", ".") | ||||
| 		enc.SetIndent("", "") | ||||
| 		for j, v := range streamTest[0:i] { | ||||
| 			if err := enc.Encode(v); err != nil { | ||||
| 				t.Fatalf("encode #%d: %v", j, err) | ||||
| 				t.Fatalf("#%d.%d Encode error: %v", i, j, err) | ||||
| 			} | ||||
| 		} | ||||
| 		if have, want := buf.String(), nlines(streamEncoded, i); have != want { | ||||
| 			t.Errorf("encoding %d items: mismatch", i) | ||||
| 			t.Errorf("encoding %d items: mismatch:", i) | ||||
| 			diff(t, []byte(have), []byte(want)) | ||||
| 			break | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestEncoderErrorAndReuseEncodeState(t *testing.T) { | ||||
| 	// Disable the GC temporarily to prevent encodeState's in Pool being cleaned away during the test. | ||||
| 	percent := debug.SetGCPercent(-1) | ||||
| 	defer debug.SetGCPercent(percent) | ||||
|  | ||||
| 	// Trigger an error in Marshal with cyclic data. | ||||
| 	type Dummy struct { | ||||
| 		Name string | ||||
| 		Next *Dummy | ||||
| 	} | ||||
| 	dummy := Dummy{Name: "Dummy"} | ||||
| 	dummy.Next = &dummy | ||||
|  | ||||
| 	var buf bytes.Buffer | ||||
| 	enc := NewEncoder(&buf) | ||||
| 	if err := enc.Encode(dummy); err == nil { | ||||
| 		t.Errorf("Encode(dummy) error: got nil, want non-nil") | ||||
| 	} | ||||
|  | ||||
| 	type Data struct { | ||||
| 		A string | ||||
| 		I int | ||||
| 	} | ||||
| 	want := Data{A: "a", I: 1} | ||||
| 	if err := enc.Encode(want); err != nil { | ||||
| 		t.Errorf("Marshal error: %v", err) | ||||
| 	} | ||||
|  | ||||
| 	var got Data | ||||
| 	if err := Unmarshal(buf.Bytes(), &got); err != nil { | ||||
| 		t.Errorf("Unmarshal error: %v", err) | ||||
| 	} | ||||
| 	if got != want { | ||||
| 		t.Errorf("Marshal/Unmarshal roundtrip:\n\tgot:  %v\n\twant: %v", got, want) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| var streamEncodedIndent = `0.1 | ||||
| "hello" | ||||
| null | ||||
| @@ -77,14 +142,14 @@ false | ||||
| ` | ||||
|  | ||||
| func TestEncoderIndent(t *testing.T) { | ||||
| 	var buf bytes.Buffer | ||||
| 	var buf strings.Builder | ||||
| 	enc := NewEncoder(&buf) | ||||
| 	enc.SetIndent(">", ".") | ||||
| 	for _, v := range streamTest { | ||||
| 		enc.Encode(v) | ||||
| 	} | ||||
| 	if have, want := buf.String(), streamEncodedIndent; have != want { | ||||
| 		t.Error("indented encoding mismatch") | ||||
| 		t.Error("Encode mismatch:") | ||||
| 		diff(t, []byte(have), []byte(want)) | ||||
| 	} | ||||
| } | ||||
| @@ -122,50 +187,51 @@ func TestEncoderSetEscapeHTML(t *testing.T) { | ||||
| 		Bar string `json:"bar,string"` | ||||
| 	}{`<html>foobar</html>`} | ||||
|  | ||||
| 	for _, tt := range []struct { | ||||
| 		name       string | ||||
| 	tests := []struct { | ||||
| 		CaseName | ||||
| 		v          any | ||||
| 		wantEscape string | ||||
| 		want       string | ||||
| 	}{ | ||||
| 		{"c", c, `"\u003c\u0026\u003e"`, `"<&>"`}, | ||||
| 		{"ct", ct, `"\"\u003c\u0026\u003e\""`, `"\"<&>\""`}, | ||||
| 		{`"<&>"`, "<&>", `"\u003c\u0026\u003e"`, `"<&>"`}, | ||||
| 		{Name("c"), c, `"\u003c\u0026\u003e"`, `"<&>"`}, | ||||
| 		{Name("ct"), ct, `"\"\u003c\u0026\u003e\""`, `"\"<&>\""`}, | ||||
| 		{Name(`"<&>"`), "<&>", `"\u003c\u0026\u003e"`, `"<&>"`}, | ||||
| 		{ | ||||
| 			"tagStruct", tagStruct, | ||||
| 			Name("tagStruct"), tagStruct, | ||||
| 			`{"\u003c\u003e\u0026#! ":0,"Invalid":0}`, | ||||
| 			`{"<>&#! ":0,"Invalid":0}`, | ||||
| 		}, | ||||
| 		{ | ||||
| 			`"<str>"`, marshalerStruct, | ||||
| 			Name(`"<str>"`), marshalerStruct, | ||||
| 			`{"NonPtr":"\u003cstr\u003e","Ptr":"\u003cstr\u003e"}`, | ||||
| 			`{"NonPtr":"<str>","Ptr":"<str>"}`, | ||||
| 		}, | ||||
| 		{ | ||||
| 			"stringOption", stringOption, | ||||
| 			Name("stringOption"), stringOption, | ||||
| 			`{"bar":"\"\\u003chtml\\u003efoobar\\u003c/html\\u003e\""}`, | ||||
| 			`{"bar":"\"<html>foobar</html>\""}`, | ||||
| 		}, | ||||
| 	} { | ||||
| 		var buf bytes.Buffer | ||||
| 		enc := NewEncoder(&buf) | ||||
| 		if err := enc.Encode(tt.v); err != nil { | ||||
| 			t.Errorf("Encode(%s): %s", tt.name, err) | ||||
| 			continue | ||||
| 		} | ||||
| 		if got := strings.TrimSpace(buf.String()); got != tt.wantEscape { | ||||
| 			t.Errorf("Encode(%s) = %#q, want %#q", tt.name, got, tt.wantEscape) | ||||
| 		} | ||||
| 		buf.Reset() | ||||
| 		enc.SetEscapeHTML(false) | ||||
| 		if err := enc.Encode(tt.v); err != nil { | ||||
| 			t.Errorf("SetEscapeHTML(false) Encode(%s): %s", tt.name, err) | ||||
| 			continue | ||||
| 		} | ||||
| 		if got := strings.TrimSpace(buf.String()); got != tt.want { | ||||
| 			t.Errorf("SetEscapeHTML(false) Encode(%s) = %#q, want %#q", | ||||
| 				tt.name, got, tt.want) | ||||
| 		} | ||||
| 	} | ||||
| 	for _, tt := range tests { | ||||
| 		t.Run(tt.Name, func(t *testing.T) { | ||||
| 			var buf strings.Builder | ||||
| 			enc := NewEncoder(&buf) | ||||
| 			if err := enc.Encode(tt.v); err != nil { | ||||
| 				t.Fatalf("%s: Encode(%s) error: %s", tt.Where, tt.Name, err) | ||||
| 			} | ||||
| 			if got := strings.TrimSpace(buf.String()); got != tt.wantEscape { | ||||
| 				t.Errorf("%s: Encode(%s):\n\tgot:  %s\n\twant: %s", tt.Where, tt.Name, got, tt.wantEscape) | ||||
| 			} | ||||
| 			buf.Reset() | ||||
| 			enc.SetEscapeHTML(false) | ||||
| 			if err := enc.Encode(tt.v); err != nil { | ||||
| 				t.Fatalf("%s: SetEscapeHTML(false) Encode(%s) error: %s", tt.Where, tt.Name, err) | ||||
| 			} | ||||
| 			if got := strings.TrimSpace(buf.String()); got != tt.want { | ||||
| 				t.Errorf("%s: SetEscapeHTML(false) Encode(%s):\n\tgot:  %s\n\twant: %s", | ||||
| 					tt.Where, tt.Name, got, tt.want) | ||||
| 			} | ||||
| 		}) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| @@ -186,14 +252,14 @@ func TestDecoder(t *testing.T) { | ||||
| 		dec := NewDecoder(&buf) | ||||
| 		for j := range out { | ||||
| 			if err := dec.Decode(&out[j]); err != nil { | ||||
| 				t.Fatalf("decode #%d/%d: %v", j, i, err) | ||||
| 				t.Fatalf("decode #%d/%d error: %v", j, i, err) | ||||
| 			} | ||||
| 		} | ||||
| 		if !reflect.DeepEqual(out, streamTest[0:i]) { | ||||
| 			t.Errorf("decoding %d items: mismatch", i) | ||||
| 			t.Errorf("decoding %d items: mismatch:", i) | ||||
| 			for j := range out { | ||||
| 				if !reflect.DeepEqual(out[j], streamTest[j]) { | ||||
| 					t.Errorf("#%d: have %v want %v", j, out[j], streamTest[j]) | ||||
| 					t.Errorf("#%d:\n\tgot:  %v\n\twant: %v", j, out[j], streamTest[j]) | ||||
| 				} | ||||
| 			} | ||||
| 			break | ||||
| @@ -212,14 +278,14 @@ func TestDecoderBuffered(t *testing.T) { | ||||
| 		t.Fatal(err) | ||||
| 	} | ||||
| 	if m.Name != "Gopher" { | ||||
| 		t.Errorf("Name = %q; want Gopher", m.Name) | ||||
| 		t.Errorf("Name = %s, want Gopher", m.Name) | ||||
| 	} | ||||
| 	rest, err := io.ReadAll(d.Buffered()) | ||||
| 	if err != nil { | ||||
| 		t.Fatal(err) | ||||
| 	} | ||||
| 	if g, w := string(rest), " extra "; g != w { | ||||
| 		t.Errorf("Remaining = %q; want %q", g, w) | ||||
| 	if got, want := string(rest), " extra "; got != want { | ||||
| 		t.Errorf("Remaining = %s, want %s", got, want) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| @@ -244,20 +310,20 @@ func TestRawMessage(t *testing.T) { | ||||
| 		Y  float32 | ||||
| 	} | ||||
| 	const raw = `["\u0056",null]` | ||||
| 	const msg = `{"X":0.1,"Id":["\u0056",null],"Y":0.2}` | ||||
| 	err := Unmarshal([]byte(msg), &data) | ||||
| 	const want = `{"X":0.1,"Id":["\u0056",null],"Y":0.2}` | ||||
| 	err := Unmarshal([]byte(want), &data) | ||||
| 	if err != nil { | ||||
| 		t.Fatalf("Unmarshal: %v", err) | ||||
| 		t.Fatalf("Unmarshal error: %v", err) | ||||
| 	} | ||||
| 	if string([]byte(data.Id)) != raw { | ||||
| 		t.Fatalf("Raw mismatch: have %#q want %#q", []byte(data.Id), raw) | ||||
| 		t.Fatalf("Unmarshal:\n\tgot:  %s\n\twant: %s", []byte(data.Id), raw) | ||||
| 	} | ||||
| 	b, err := Marshal(&data) | ||||
| 	got, err := Marshal(&data) | ||||
| 	if err != nil { | ||||
| 		t.Fatalf("Marshal: %v", err) | ||||
| 		t.Fatalf("Marshal error: %v", err) | ||||
| 	} | ||||
| 	if string(b) != msg { | ||||
| 		t.Fatalf("Marshal: have %#q want %#q", b, msg) | ||||
| 	if string(got) != want { | ||||
| 		t.Fatalf("Marshal:\n\tgot:  %s\n\twant: %s", got, want) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| @@ -268,174 +334,156 @@ func TestNullRawMessage(t *testing.T) { | ||||
| 		IdPtr *RawMessage | ||||
| 		Y     float32 | ||||
| 	} | ||||
| 	const msg = `{"X":0.1,"Id":null,"IdPtr":null,"Y":0.2}` | ||||
| 	err := Unmarshal([]byte(msg), &data) | ||||
| 	const want = `{"X":0.1,"Id":null,"IdPtr":null,"Y":0.2}` | ||||
| 	err := Unmarshal([]byte(want), &data) | ||||
| 	if err != nil { | ||||
| 		t.Fatalf("Unmarshal: %v", err) | ||||
| 		t.Fatalf("Unmarshal error: %v", err) | ||||
| 	} | ||||
| 	if want, got := "null", string(data.Id); want != got { | ||||
| 		t.Fatalf("Raw mismatch: have %q, want %q", got, want) | ||||
| 		t.Fatalf("Unmarshal:\n\tgot:  %s\n\twant: %s", got, want) | ||||
| 	} | ||||
| 	if data.IdPtr != nil { | ||||
| 		t.Fatalf("Raw pointer mismatch: have non-nil, want nil") | ||||
| 		t.Fatalf("pointer mismatch: got non-nil, want nil") | ||||
| 	} | ||||
| 	b, err := Marshal(&data) | ||||
| 	got, err := Marshal(&data) | ||||
| 	if err != nil { | ||||
| 		t.Fatalf("Marshal: %v", err) | ||||
| 		t.Fatalf("Marshal error: %v", err) | ||||
| 	} | ||||
| 	if string(b) != msg { | ||||
| 		t.Fatalf("Marshal: have %#q want %#q", b, msg) | ||||
| 	if string(got) != want { | ||||
| 		t.Fatalf("Marshal:\n\tgot:  %s\n\twant: %s", got, want) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| var blockingTests = []string{ | ||||
| 	`{"x": 1}`, | ||||
| 	`[1, 2, 3]`, | ||||
| } | ||||
|  | ||||
| func TestBlocking(t *testing.T) { | ||||
| 	for _, enc := range blockingTests { | ||||
| 		r, w := net.Pipe() | ||||
| 		go w.Write([]byte(enc)) | ||||
| 		var val any | ||||
|  | ||||
| 		// If Decode reads beyond what w.Write writes above, | ||||
| 		// it will block, and the test will deadlock. | ||||
| 		if err := NewDecoder(r).Decode(&val); err != nil { | ||||
| 			t.Errorf("decoding %s: %v", enc, err) | ||||
| 		} | ||||
| 		r.Close() | ||||
| 		w.Close() | ||||
| 	tests := []struct { | ||||
| 		CaseName | ||||
| 		in string | ||||
| 	}{ | ||||
| 		{Name(""), `{"x": 1}`}, | ||||
| 		{Name(""), `[1, 2, 3]`}, | ||||
| 	} | ||||
| } | ||||
| 	for _, tt := range tests { | ||||
| 		t.Run(tt.Name, func(t *testing.T) { | ||||
| 			r, w := net.Pipe() | ||||
| 			go w.Write([]byte(tt.in)) | ||||
| 			var val any | ||||
|  | ||||
| func BenchmarkEncoderEncode(b *testing.B) { | ||||
| 	b.ReportAllocs() | ||||
| 	type T struct { | ||||
| 		X, Y string | ||||
| 	} | ||||
| 	v := &T{"foo", "bar"} | ||||
| 	b.RunParallel(func(pb *testing.PB) { | ||||
| 		for pb.Next() { | ||||
| 			if err := NewEncoder(io.Discard).Encode(v); err != nil { | ||||
| 				b.Fatal(err) | ||||
| 			// If Decode reads beyond what w.Write writes above, | ||||
| 			// it will block, and the test will deadlock. | ||||
| 			if err := NewDecoder(r).Decode(&val); err != nil { | ||||
| 				t.Errorf("%s: NewDecoder(%s).Decode error: %v", tt.Where, tt.in, err) | ||||
| 			} | ||||
| 		} | ||||
| 	}) | ||||
| } | ||||
|  | ||||
| type tokenStreamCase struct { | ||||
| 	json      string | ||||
| 	expTokens []any | ||||
| 			r.Close() | ||||
| 			w.Close() | ||||
| 		}) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| type decodeThis struct { | ||||
| 	v any | ||||
| } | ||||
|  | ||||
| var tokenStreamCases = []tokenStreamCase{ | ||||
| 	// streaming token cases | ||||
| 	{json: `10`, expTokens: []any{float64(10)}}, | ||||
| 	{json: ` [10] `, expTokens: []any{ | ||||
| 		Delim('['), float64(10), Delim(']')}}, | ||||
| 	{json: ` [false,10,"b"] `, expTokens: []any{ | ||||
| 		Delim('['), false, float64(10), "b", Delim(']')}}, | ||||
| 	{json: `{ "a": 1 }`, expTokens: []any{ | ||||
| 		Delim('{'), "a", float64(1), Delim('}')}}, | ||||
| 	{json: `{"a": 1, "b":"3"}`, expTokens: []any{ | ||||
| 		Delim('{'), "a", float64(1), "b", "3", Delim('}')}}, | ||||
| 	{json: ` [{"a": 1},{"a": 2}] `, expTokens: []any{ | ||||
| 		Delim('['), | ||||
| 		Delim('{'), "a", float64(1), Delim('}'), | ||||
| 		Delim('{'), "a", float64(2), Delim('}'), | ||||
| 		Delim(']')}}, | ||||
| 	{json: `{"obj": {"a": 1}}`, expTokens: []any{ | ||||
| 		Delim('{'), "obj", Delim('{'), "a", float64(1), Delim('}'), | ||||
| 		Delim('}')}}, | ||||
| 	{json: `{"obj": [{"a": 1}]}`, expTokens: []any{ | ||||
| 		Delim('{'), "obj", Delim('['), | ||||
| 		Delim('{'), "a", float64(1), Delim('}'), | ||||
| 		Delim(']'), Delim('}')}}, | ||||
|  | ||||
| 	// streaming tokens with intermittent Decode() | ||||
| 	{json: `{ "a": 1 }`, expTokens: []any{ | ||||
| 		Delim('{'), "a", | ||||
| 		decodeThis{float64(1)}, | ||||
| 		Delim('}')}}, | ||||
| 	{json: ` [ { "a" : 1 } ] `, expTokens: []any{ | ||||
| 		Delim('['), | ||||
| 		decodeThis{map[string]any{"a": float64(1)}}, | ||||
| 		Delim(']')}}, | ||||
| 	{json: ` [{"a": 1},{"a": 2}] `, expTokens: []any{ | ||||
| 		Delim('['), | ||||
| 		decodeThis{map[string]any{"a": float64(1)}}, | ||||
| 		decodeThis{map[string]any{"a": float64(2)}}, | ||||
| 		Delim(']')}}, | ||||
| 	{json: `{ "obj" : [ { "a" : 1 } ] }`, expTokens: []any{ | ||||
| 		Delim('{'), "obj", Delim('['), | ||||
| 		decodeThis{map[string]any{"a": float64(1)}}, | ||||
| 		Delim(']'), Delim('}')}}, | ||||
|  | ||||
| 	{json: `{"obj": {"a": 1}}`, expTokens: []any{ | ||||
| 		Delim('{'), "obj", | ||||
| 		decodeThis{map[string]any{"a": float64(1)}}, | ||||
| 		Delim('}')}}, | ||||
| 	{json: `{"obj": [{"a": 1}]}`, expTokens: []any{ | ||||
| 		Delim('{'), "obj", | ||||
| 		decodeThis{[]any{ | ||||
| 			map[string]any{"a": float64(1)}, | ||||
| 		}}, | ||||
| 		Delim('}')}}, | ||||
| 	{json: ` [{"a": 1} {"a": 2}] `, expTokens: []any{ | ||||
| 		Delim('['), | ||||
| 		decodeThis{map[string]any{"a": float64(1)}}, | ||||
| 		decodeThis{&SyntaxError{"expected comma after array element", 11}}, | ||||
| 	}}, | ||||
| 	{json: `{ "` + strings.Repeat("a", 513) + `" 1 }`, expTokens: []any{ | ||||
| 		Delim('{'), strings.Repeat("a", 513), | ||||
| 		decodeThis{&SyntaxError{"expected colon after object key", 518}}, | ||||
| 	}}, | ||||
| 	{json: `{ "\a" }`, expTokens: []any{ | ||||
| 		Delim('{'), | ||||
| 		&SyntaxError{"invalid character 'a' in string escape code", 3}, | ||||
| 	}}, | ||||
| 	{json: ` \a`, expTokens: []any{ | ||||
| 		&SyntaxError{"invalid character '\\\\' looking for beginning of value", 1}, | ||||
| 	}}, | ||||
| } | ||||
|  | ||||
| func TestDecodeInStream(t *testing.T) { | ||||
| 	for ci, tcase := range tokenStreamCases { | ||||
| 	tests := []struct { | ||||
| 		CaseName | ||||
| 		json      string | ||||
| 		expTokens []any | ||||
| 	}{ | ||||
| 		// streaming token cases | ||||
| 		{CaseName: Name(""), json: `10`, expTokens: []any{float64(10)}}, | ||||
| 		{CaseName: Name(""), json: ` [10] `, expTokens: []any{ | ||||
| 			Delim('['), float64(10), Delim(']')}}, | ||||
| 		{CaseName: Name(""), json: ` [false,10,"b"] `, expTokens: []any{ | ||||
| 			Delim('['), false, float64(10), "b", Delim(']')}}, | ||||
| 		{CaseName: Name(""), json: `{ "a": 1 }`, expTokens: []any{ | ||||
| 			Delim('{'), "a", float64(1), Delim('}')}}, | ||||
| 		{CaseName: Name(""), json: `{"a": 1, "b":"3"}`, expTokens: []any{ | ||||
| 			Delim('{'), "a", float64(1), "b", "3", Delim('}')}}, | ||||
| 		{CaseName: Name(""), json: ` [{"a": 1},{"a": 2}] `, expTokens: []any{ | ||||
| 			Delim('['), | ||||
| 			Delim('{'), "a", float64(1), Delim('}'), | ||||
| 			Delim('{'), "a", float64(2), Delim('}'), | ||||
| 			Delim(']')}}, | ||||
| 		{CaseName: Name(""), json: `{"obj": {"a": 1}}`, expTokens: []any{ | ||||
| 			Delim('{'), "obj", Delim('{'), "a", float64(1), Delim('}'), | ||||
| 			Delim('}')}}, | ||||
| 		{CaseName: Name(""), json: `{"obj": [{"a": 1}]}`, expTokens: []any{ | ||||
| 			Delim('{'), "obj", Delim('['), | ||||
| 			Delim('{'), "a", float64(1), Delim('}'), | ||||
| 			Delim(']'), Delim('}')}}, | ||||
|  | ||||
| 		dec := NewDecoder(strings.NewReader(tcase.json)) | ||||
| 		for i, etk := range tcase.expTokens { | ||||
| 		// streaming tokens with intermittent Decode() | ||||
| 		{CaseName: Name(""), json: `{ "a": 1 }`, expTokens: []any{ | ||||
| 			Delim('{'), "a", | ||||
| 			decodeThis{float64(1)}, | ||||
| 			Delim('}')}}, | ||||
| 		{CaseName: Name(""), json: ` [ { "a" : 1 } ] `, expTokens: []any{ | ||||
| 			Delim('['), | ||||
| 			decodeThis{map[string]any{"a": float64(1)}}, | ||||
| 			Delim(']')}}, | ||||
| 		{CaseName: Name(""), json: ` [{"a": 1},{"a": 2}] `, expTokens: []any{ | ||||
| 			Delim('['), | ||||
| 			decodeThis{map[string]any{"a": float64(1)}}, | ||||
| 			decodeThis{map[string]any{"a": float64(2)}}, | ||||
| 			Delim(']')}}, | ||||
| 		{CaseName: Name(""), json: `{ "obj" : [ { "a" : 1 } ] }`, expTokens: []any{ | ||||
| 			Delim('{'), "obj", Delim('['), | ||||
| 			decodeThis{map[string]any{"a": float64(1)}}, | ||||
| 			Delim(']'), Delim('}')}}, | ||||
|  | ||||
| 			var tk any | ||||
| 			var err error | ||||
| 		{CaseName: Name(""), json: `{"obj": {"a": 1}}`, expTokens: []any{ | ||||
| 			Delim('{'), "obj", | ||||
| 			decodeThis{map[string]any{"a": float64(1)}}, | ||||
| 			Delim('}')}}, | ||||
| 		{CaseName: Name(""), json: `{"obj": [{"a": 1}]}`, expTokens: []any{ | ||||
| 			Delim('{'), "obj", | ||||
| 			decodeThis{[]any{ | ||||
| 				map[string]any{"a": float64(1)}, | ||||
| 			}}, | ||||
| 			Delim('}')}}, | ||||
| 		{CaseName: Name(""), json: ` [{"a": 1} {"a": 2}] `, expTokens: []any{ | ||||
| 			Delim('['), | ||||
| 			decodeThis{map[string]any{"a": float64(1)}}, | ||||
| 			decodeThis{&SyntaxError{"expected comma after array element", 11}}, | ||||
| 		}}, | ||||
| 		{CaseName: Name(""), json: `{ "` + strings.Repeat("a", 513) + `" 1 }`, expTokens: []any{ | ||||
| 			Delim('{'), strings.Repeat("a", 513), | ||||
| 			decodeThis{&SyntaxError{"expected colon after object key", 518}}, | ||||
| 		}}, | ||||
| 		{CaseName: Name(""), json: `{ "\a" }`, expTokens: []any{ | ||||
| 			Delim('{'), | ||||
| 			&SyntaxError{"invalid character 'a' in string escape code", 3}, | ||||
| 		}}, | ||||
| 		{CaseName: Name(""), json: ` \a`, expTokens: []any{ | ||||
| 			&SyntaxError{"invalid character '\\\\' looking for beginning of value", 1}, | ||||
| 		}}, | ||||
| 	} | ||||
| 	for _, tt := range tests { | ||||
| 		t.Run(tt.Name, func(t *testing.T) { | ||||
| 			dec := NewDecoder(strings.NewReader(tt.json)) | ||||
| 			for i, want := range tt.expTokens { | ||||
| 				var got any | ||||
| 				var err error | ||||
|  | ||||
| 			if dt, ok := etk.(decodeThis); ok { | ||||
| 				etk = dt.v | ||||
| 				err = dec.Decode(&tk) | ||||
| 			} else { | ||||
| 				tk, err = dec.Token() | ||||
| 			} | ||||
| 			if experr, ok := etk.(error); ok { | ||||
| 				if err == nil || !reflect.DeepEqual(err, experr) { | ||||
| 					t.Errorf("case %v: Expected error %#v in %q, but was %#v", ci, experr, tcase.json, err) | ||||
| 				if dt, ok := want.(decodeThis); ok { | ||||
| 					want = dt.v | ||||
| 					err = dec.Decode(&got) | ||||
| 				} else { | ||||
| 					got, err = dec.Token() | ||||
| 				} | ||||
| 				if errWant, ok := want.(error); ok { | ||||
| 					if err == nil || !reflect.DeepEqual(err, errWant) { | ||||
| 						t.Fatalf("%s:\n\tinput: %s\n\tgot error:  %v\n\twant error: %v", tt.Where, tt.json, err, errWant) | ||||
| 					} | ||||
| 					break | ||||
| 				} else if err != nil { | ||||
| 					t.Fatalf("%s:\n\tinput: %s\n\tgot error:  %v\n\twant error: nil", tt.Where, tt.json, err) | ||||
| 				} | ||||
| 				if !reflect.DeepEqual(got, want) { | ||||
| 					t.Fatalf("%s: token %d:\n\tinput: %s\n\tgot:  %T(%v)\n\twant: %T(%v)", tt.Where, i, tt.json, got, got, want, want) | ||||
| 				} | ||||
| 				break | ||||
| 			} else if err == io.EOF { | ||||
| 				t.Errorf("case %v: Unexpected EOF in %q", ci, tcase.json) | ||||
| 				break | ||||
| 			} else if err != nil { | ||||
| 				t.Errorf("case %v: Unexpected error '%#v' in %q", ci, err, tcase.json) | ||||
| 				break | ||||
| 			} | ||||
| 			if !reflect.DeepEqual(tk, etk) { | ||||
| 				t.Errorf(`case %v: %q @ %v expected %T(%v) was %T(%v)`, ci, tcase.json, i, etk, etk, tk, tk) | ||||
| 				break | ||||
| 			} | ||||
| 		} | ||||
| 		}) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| @@ -449,7 +497,7 @@ func TestHTTPDecoding(t *testing.T) { | ||||
| 	defer ts.Close() | ||||
| 	res, err := http.Get(ts.URL) | ||||
| 	if err != nil { | ||||
| 		log.Fatalf("GET failed: %v", err) | ||||
| 		log.Fatalf("http.Get error: %v", err) | ||||
| 	} | ||||
| 	defer res.Body.Close() | ||||
|  | ||||
| @@ -460,15 +508,15 @@ func TestHTTPDecoding(t *testing.T) { | ||||
| 	d := NewDecoder(res.Body) | ||||
| 	err = d.Decode(&foo) | ||||
| 	if err != nil { | ||||
| 		t.Fatalf("Decode: %v", err) | ||||
| 		t.Fatalf("Decode error: %v", err) | ||||
| 	} | ||||
| 	if foo.Foo != "bar" { | ||||
| 		t.Errorf("decoded %q; want \"bar\"", foo.Foo) | ||||
| 		t.Errorf(`Decode: got %q, want "bar"`, foo.Foo) | ||||
| 	} | ||||
|  | ||||
| 	// make sure we get the EOF the second time | ||||
| 	err = d.Decode(&foo) | ||||
| 	if err != io.EOF { | ||||
| 		t.Errorf("err = %v; want io.EOF", err) | ||||
| 		t.Errorf("Decode error:\n\tgot:  %v\n\twant: io.EOF", err) | ||||
| 	} | ||||
| } | ||||
|   | ||||
| @@ -72,49 +72,50 @@ type unicodeTag struct { | ||||
| 	W string `json:"Ελλάδα"` | ||||
| } | ||||
|  | ||||
| var structTagObjectKeyTests = []struct { | ||||
| 	raw   any | ||||
| 	value string | ||||
| 	key   string | ||||
| }{ | ||||
| 	{basicLatin2xTag{"2x"}, "2x", "$%-/"}, | ||||
| 	{basicLatin3xTag{"3x"}, "3x", "0123456789"}, | ||||
| 	{basicLatin4xTag{"4x"}, "4x", "ABCDEFGHIJKLMO"}, | ||||
| 	{basicLatin5xTag{"5x"}, "5x", "PQRSTUVWXYZ_"}, | ||||
| 	{basicLatin6xTag{"6x"}, "6x", "abcdefghijklmno"}, | ||||
| 	{basicLatin7xTag{"7x"}, "7x", "pqrstuvwxyz"}, | ||||
| 	{miscPlaneTag{"いろはにほへと"}, "いろはにほへと", "色は匂へど"}, | ||||
| 	{dashTag{"foo"}, "foo", "-"}, | ||||
| 	{emptyTag{"Pour Moi"}, "Pour Moi", "W"}, | ||||
| 	{misnamedTag{"Animal Kingdom"}, "Animal Kingdom", "X"}, | ||||
| 	{badFormatTag{"Orfevre"}, "Orfevre", "Y"}, | ||||
| 	{badCodeTag{"Reliable Man"}, "Reliable Man", "Z"}, | ||||
| 	{percentSlashTag{"brut"}, "brut", "text/html%"}, | ||||
| 	{punctuationTag{"Union Rags"}, "Union Rags", "!#$%&()*+-./:;<=>?@[]^_{|}~ "}, | ||||
| 	{spaceTag{"Perreddu"}, "Perreddu", "With space"}, | ||||
| 	{unicodeTag{"Loukanikos"}, "Loukanikos", "Ελλάδα"}, | ||||
| } | ||||
|  | ||||
| func TestStructTagObjectKey(t *testing.T) { | ||||
| 	for _, tt := range structTagObjectKeyTests { | ||||
| 		b, err := Marshal(tt.raw) | ||||
| 		if err != nil { | ||||
| 			t.Fatalf("Marshal(%#q) failed: %v", tt.raw, err) | ||||
| 		} | ||||
| 		var f any | ||||
| 		err = Unmarshal(b, &f) | ||||
| 		if err != nil { | ||||
| 			t.Fatalf("Unmarshal(%#q) failed: %v", b, err) | ||||
| 		} | ||||
| 		for i, v := range f.(map[string]any) { | ||||
| 			switch i { | ||||
| 			case tt.key: | ||||
| 				if s, ok := v.(string); !ok || s != tt.value { | ||||
| 					t.Fatalf("Unexpected value: %#q, want %v", s, tt.value) | ||||
| 				} | ||||
| 			default: | ||||
| 				t.Fatalf("Unexpected key: %#q, from %#q", i, b) | ||||
| 	tests := []struct { | ||||
| 		CaseName | ||||
| 		raw   any | ||||
| 		value string | ||||
| 		key   string | ||||
| 	}{ | ||||
| 		{Name(""), basicLatin2xTag{"2x"}, "2x", "$%-/"}, | ||||
| 		{Name(""), basicLatin3xTag{"3x"}, "3x", "0123456789"}, | ||||
| 		{Name(""), basicLatin4xTag{"4x"}, "4x", "ABCDEFGHIJKLMO"}, | ||||
| 		{Name(""), basicLatin5xTag{"5x"}, "5x", "PQRSTUVWXYZ_"}, | ||||
| 		{Name(""), basicLatin6xTag{"6x"}, "6x", "abcdefghijklmno"}, | ||||
| 		{Name(""), basicLatin7xTag{"7x"}, "7x", "pqrstuvwxyz"}, | ||||
| 		{Name(""), miscPlaneTag{"いろはにほへと"}, "いろはにほへと", "色は匂へど"}, | ||||
| 		{Name(""), dashTag{"foo"}, "foo", "-"}, | ||||
| 		{Name(""), emptyTag{"Pour Moi"}, "Pour Moi", "W"}, | ||||
| 		{Name(""), misnamedTag{"Animal Kingdom"}, "Animal Kingdom", "X"}, | ||||
| 		{Name(""), badFormatTag{"Orfevre"}, "Orfevre", "Y"}, | ||||
| 		{Name(""), badCodeTag{"Reliable Man"}, "Reliable Man", "Z"}, | ||||
| 		{Name(""), percentSlashTag{"brut"}, "brut", "text/html%"}, | ||||
| 		{Name(""), punctuationTag{"Union Rags"}, "Union Rags", "!#$%&()*+-./:;<=>?@[]^_{|}~ "}, | ||||
| 		{Name(""), spaceTag{"Perreddu"}, "Perreddu", "With space"}, | ||||
| 		{Name(""), unicodeTag{"Loukanikos"}, "Loukanikos", "Ελλάδα"}, | ||||
| 	} | ||||
| 	for _, tt := range tests { | ||||
| 		t.Run(tt.Name, func(t *testing.T) { | ||||
| 			b, err := Marshal(tt.raw) | ||||
| 			if err != nil { | ||||
| 				t.Fatalf("%s: Marshal error: %v", tt.Where, err) | ||||
| 			} | ||||
| 		} | ||||
| 			var f any | ||||
| 			err = Unmarshal(b, &f) | ||||
| 			if err != nil { | ||||
| 				t.Fatalf("%s: Unmarshal error: %v", tt.Where, err) | ||||
| 			} | ||||
| 			for k, v := range f.(map[string]any) { | ||||
| 				if k == tt.key { | ||||
| 					if s, ok := v.(string); !ok || s != tt.value { | ||||
| 						t.Fatalf("%s: Unmarshal(%#q) value:\n\tgot:  %q\n\twant: %q", tt.Where, b, s, tt.value) | ||||
| 					} | ||||
| 				} else { | ||||
| 					t.Fatalf("%s: Unmarshal(%#q): unexpected key: %q", tt.Where, b, k) | ||||
| 				} | ||||
| 			} | ||||
| 		}) | ||||
| 	} | ||||
| } | ||||
|   | ||||
| @@ -22,7 +22,7 @@ func TestTagParsing(t *testing.T) { | ||||
| 		{"bar", false}, | ||||
| 	} { | ||||
| 		if opts.Contains(tt.opt) != tt.want { | ||||
| 			t.Errorf("Contains(%q) = %v", tt.opt, !tt.want) | ||||
| 			t.Errorf("Contains(%q) = %v, want %v", tt.opt, !tt.want, tt.want) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|   | ||||
| @@ -169,7 +169,7 @@ func EncodeImage(img image.Image, compression ImageCompresson) (bytes.Buffer, st | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fillColor color.Color) (image.Image, error) { | ||||
| func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fillColor color.Color) (image.Image, PercentageRectangle, error) { | ||||
|  | ||||
| 	iw := img.Bounds().Size().X | ||||
| 	ih := img.Bounds().Size().Y | ||||
| @@ -214,12 +214,12 @@ func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fil | ||||
| 		draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src) | ||||
| 		draw.Draw(newImg, newImg.Bounds(), img, image.Pt(0, 0), draw.Over) | ||||
|  | ||||
| 		return newImg, nil | ||||
| 		return newImg, PercentageRectangle{0, 0, 1, 1}, nil | ||||
| 	} | ||||
|  | ||||
| 	if fit == ImageFitContainCenter || fit == ImageFitContainTopLeft || fit == ImageFitContainTopRight || fit == ImageFitContainBottomLeft || fit == ImageFitContainBottomRight { | ||||
|  | ||||
| 		// image-fit:cover fills the target-bounding-box with the image, there is potentially empty-space, it potentially cuts parts of the image away | ||||
		// image-fit:contain fits the whole image inside the target-bounding-box, there is potentially empty-space, it never cuts parts of the image away
|  | ||||
| 		// we use the bigger (!) value of facW and facH, | ||||
| 		// because the image is made to fit the bounding-box, the bigger factor (= the dimension the image is stretched less) is relevant | ||||
| @@ -266,7 +266,7 @@ func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fil | ||||
| 		draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src) | ||||
| 		draw.Draw(newImg, destBounds, img, image.Pt(0, 0), draw.Over) | ||||
|  | ||||
| 		return newImg, nil | ||||
| 		return newImg, calcRelativeRect(destBounds, newImg.Bounds()), nil | ||||
| 	} | ||||
|  | ||||
| 	if fit == ImageFitStretch { | ||||
| @@ -293,10 +293,10 @@ func ObjectFitImage(img image.Image, bbw float64, bbh float64, fit ImageFit, fil | ||||
| 		draw.Draw(newImg, newImg.Bounds(), &image.Uniform{C: fillColor}, image.Pt(0, 0), draw.Src) | ||||
| 		draw.Draw(newImg, newImg.Bounds(), img, image.Pt(0, 0), draw.Over) | ||||
|  | ||||
| 		return newImg, nil | ||||
| 		return newImg, PercentageRectangle{0, 0, 1, 1}, nil | ||||
| 	} | ||||
|  | ||||
| 	return nil, exerr.New(exerr.TypeInternal, fmt.Sprintf("unknown image-fit: '%s'", fit)).Build() | ||||
| 	return nil, PercentageRectangle{}, exerr.New(exerr.TypeInternal, fmt.Sprintf("unknown image-fit: '%s'", fit)).Build() | ||||
| } | ||||
|  | ||||
| func VerifyAndDecodeImage(data io.Reader, mime string) (image.Image, error) { | ||||
|   | ||||
							
								
								
									
										35
									
								
								imageext/types.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										35
									
								
								imageext/types.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,35 @@ | ||||
| package imageext | ||||
|  | ||||
| import "image" | ||||
|  | ||||
| type Rectangle struct { | ||||
| 	X float64 | ||||
| 	Y float64 | ||||
| 	W float64 | ||||
| 	H float64 | ||||
| } | ||||
|  | ||||
| type PercentageRectangle struct { | ||||
| 	X float64 // [0..1] | ||||
| 	Y float64 // [0..1] | ||||
| 	W float64 // [0..1] | ||||
| 	H float64 // [0..1] | ||||
| } | ||||
|  | ||||
| func (r PercentageRectangle) Of(ref Rectangle) Rectangle { | ||||
| 	return Rectangle{ | ||||
| 		X: ref.X + r.X*ref.W, | ||||
| 		Y: ref.Y + r.Y*ref.H, | ||||
| 		W: r.W * ref.W, | ||||
| 		H: r.H * ref.H, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func calcRelativeRect(inner image.Rectangle, outer image.Rectangle) PercentageRectangle { | ||||
| 	return PercentageRectangle{ | ||||
| 		X: float64(inner.Min.X-outer.Min.X) / float64(outer.Dx()), | ||||
| 		Y: float64(inner.Min.Y-outer.Min.Y) / float64(outer.Dy()), | ||||
| 		W: float64(inner.Dx()) / float64(outer.Dx()), | ||||
| 		H: float64(inner.Dy()) / float64(outer.Dy()), | ||||
| 	} | ||||
| } | ||||
| @@ -24,6 +24,7 @@ func Range[T IntegerConstraint](start T, end T) []T { | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| // ForceArray ensures that the given array is not nil (nil will be converted to empty) | ||||
| func ForceArray[T any](v []T) []T { | ||||
| 	if v == nil { | ||||
| 		return make([]T, 0) | ||||
| @@ -47,6 +48,16 @@ func InArray[T comparable](needle T, haystack []T) bool { | ||||
| 	return false | ||||
| } | ||||
|  | ||||
// ArrContains reports whether needle occurs in haystack
// (same as InArray, but with another name for better findability).
func ArrContains[T comparable](haystack []T, needle T) bool {
	for idx := range haystack {
		if haystack[idx] == needle {
			return true
		}
	}
	return false
}
|  | ||||
| func ArrUnique[T comparable](array []T) []T { | ||||
| 	m := make(map[T]bool, len(array)) | ||||
| 	for _, v := range array { | ||||
| @@ -323,6 +334,16 @@ func ArrMap[T1 any, T2 any](arr []T1, conv func(v T1) T2) []T2 { | ||||
| 	return r | ||||
| } | ||||
|  | ||||
// ArrDeRef dereferences every non-nil pointer in arr and returns the values.
// Nil entries are skipped, so the result may be shorter than the input.
func ArrDeRef[T1 any](arr []*T1) []T1 {
	res := make([]T1, 0, len(arr))
	for idx := range arr {
		if ptr := arr[idx]; ptr != nil {
			res = append(res, *ptr)
		}
	}
	return res
}
|  | ||||
| func MapMap[TK comparable, TV any, TR any](inmap map[TK]TV, conv func(k TK, v TV) TR) []TR { | ||||
| 	r := make([]TR, 0, len(inmap)) | ||||
| 	for k, v := range inmap { | ||||
| @@ -391,6 +412,14 @@ func ArrSum[T NumberConstraint](arr []T) T { | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| func ArrMapSum[T1 any, T2 NumberConstraint](arr []T1, conv func(v T1) T2) T2 { | ||||
| 	var r T2 = 0 | ||||
| 	for _, v := range arr { | ||||
| 		r += conv(v) | ||||
| 	} | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| func ArrFlatten[T1 any, T2 any](arr []T1, conv func(v T1) []T2) []T2 { | ||||
| 	r := make([]T2, 0, len(arr)) | ||||
| 	for _, v1 := range arr { | ||||
|   | ||||
| @@ -133,6 +133,31 @@ func CoalesceStringer(s fmt.Stringer, def string) string { | ||||
| 	} | ||||
| } | ||||
|  | ||||
// CoalesceDefault returns v1 unless it is the zero value of T,
// in which case def is returned.
func CoalesceDefault[T comparable](v1 T, def T) T {
	var zero T
	if v1 == zero {
		return def
	}
	return v1
}
|  | ||||
// CoalesceDefaultArr returns the first value out of (v1, vMore...) that is not
// the zero value of T. If every candidate is zero, the last element of vMore is
// returned (or v1 itself when vMore is empty).
func CoalesceDefaultArr[T comparable](v1 T, vMore ...T) T {
	var zero T

	if v1 != zero {
		return v1
	}
	if len(vMore) == 0 {
		return v1
	}

	for i := 0; i < len(vMore)-1; i++ {
		if vMore[i] != zero {
			// BUGFIX: previously this returned v1 (which is known to be the
			// zero value on this path) instead of the non-zero candidate found.
			return vMore[i]
		}
	}

	return vMore[len(vMore)-1]
}
|  | ||||
| func SafeCast[T any](v any, def T) T { | ||||
| 	switch r := v.(type) { | ||||
| 	case T: | ||||
|   | ||||
							
								
								
									
										15
									
								
								langext/io.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										15
									
								
								langext/io.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,15 @@ | ||||
| package langext | ||||
|  | ||||
| import "io" | ||||
|  | ||||
// nopCloser wraps an io.Writer with a Close method that does nothing.
type nopCloser struct {
	io.Writer
}

// Close implements io.Closer as a no-op and always returns nil.
func (n nopCloser) Close() error {
	return nil
}

// WriteNopCloser returns an io.WriteCloser that forwards writes to w and whose
// Close is a no-op (the write-side analogue of io.NopCloser).
func WriteNopCloser(w io.Writer) io.WriteCloser {
	return nopCloser{Writer: w}
}
							
								
								
									
										21
									
								
								langext/iter.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										21
									
								
								langext/iter.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,21 @@ | ||||
| package langext | ||||
|  | ||||
| import ( | ||||
| 	"iter" | ||||
| ) | ||||
|  | ||||
| func IterSingleValueSeq[T any](value T) iter.Seq[T] { | ||||
| 	return func(yield func(T) bool) { | ||||
| 		if !yield(value) { | ||||
| 			return | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func IterSingleValueSeq2[T1 any, T2 any](v1 T1, v2 T2) iter.Seq2[T1, T2] { | ||||
| 	return func(yield func(T1, T2) bool) { | ||||
| 		if !yield(v1, v2) { | ||||
| 			return | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
| @@ -66,7 +66,7 @@ func CopyMap[K comparable, V any](a map[K]V) map[K]V { | ||||
|  | ||||
| func ForceMap[K comparable, V any](v map[K]V) map[K]V { | ||||
| 	if v == nil { | ||||
| 		return make(map[K]V, 0) | ||||
| 		return make(map[K]V) | ||||
| 	} else { | ||||
| 		return v | ||||
| 	} | ||||
|   | ||||
| @@ -11,7 +11,7 @@ func (p PanicWrappedErr) Error() string { | ||||
| 	return "A panic occured" | ||||
| } | ||||
|  | ||||
| func (p PanicWrappedErr) ReoveredObj() any { | ||||
| func (p PanicWrappedErr) RecoveredObj() any { | ||||
| 	return p.panic | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -1,5 +1,7 @@ | ||||
| package mathext | ||||
|  | ||||
| import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
|  | ||||
| func ClampInt(v int, lo int, hi int) int { | ||||
| 	if v < lo { | ||||
| 		return lo | ||||
| @@ -39,3 +41,25 @@ func ClampFloat64(v float64, lo float64, hi float64) float64 { | ||||
| 		return v | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func Clamp[T langext.NumberConstraint](v T, min T, max T) T { | ||||
| 	if v < min { | ||||
| 		return min | ||||
| 	} else if v > max { | ||||
| 		return max | ||||
| 	} else { | ||||
| 		return v | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func ClampOpt[T langext.NumberConstraint](v *T, fallback T, min T, max T) T { | ||||
| 	if v == nil { | ||||
| 		return fallback | ||||
| 	} else if *v < min { | ||||
| 		return min | ||||
| 	} else if *v > max { | ||||
| 		return max | ||||
| 	} else { | ||||
| 		return *v | ||||
| 	} | ||||
| } | ||||
|   | ||||
| @@ -87,13 +87,3 @@ func Abs[T langext.NumberConstraint](v T) T { | ||||
| 		return v | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func Clamp[T langext.NumberConstraint](v T, min T, max T) T { | ||||
| 	if v < min { | ||||
| 		return min | ||||
| 	} else if v > max { | ||||
| 		return max | ||||
| 	} else { | ||||
| 		return v | ||||
| 	} | ||||
| } | ||||
|   | ||||
| @@ -1,6 +1,9 @@ | ||||
| package mathext | ||||
|  | ||||
| import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| import ( | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| ) | ||||
|  | ||||
| func Sum[T langext.NumberConstraint](v []T) T { | ||||
| 	total := T(0) | ||||
| @@ -41,3 +44,53 @@ func ArrMax[T langext.OrderedConstraint](v []T) T { | ||||
| 	} | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| func MustPercentile[T langext.NumberConstraint](rawdata []T, percentile float64) T { | ||||
| 	v, err := Percentile(rawdata, percentile) | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
| 	return v | ||||
| } | ||||
|  | ||||
| func Percentile[T langext.NumberConstraint](rawdata []T, percentile float64) (T, error) { | ||||
| 	v, err := FloatPercentile(rawdata, percentile) | ||||
| 	if err != nil { | ||||
| 		return T(0), err | ||||
| 	} | ||||
| 	return T(v), nil | ||||
|  | ||||
| } | ||||
|  | ||||
// FloatPercentile computes the given percentile (0..100, inclusive) of rawdata
// as a float64, using linear interpolation between the two nearest ranks.
// The input is copied and sorted, so rawdata is not modified.
// Returns an error when rawdata is empty or percentile is out of range.
func FloatPercentile[T langext.NumberConstraint](rawdata []T, percentile float64) (float64, error) {
	if len(rawdata) == 0 {
		return 0, exerr.New(exerr.TypeAssert, "no data to calculate percentile").Any("percentile", percentile).Build()
	}

	if percentile < 0 || percentile > 100 {
		return 0, exerr.New(exerr.TypeAssert, "percentile out of range").Any("percentile", percentile).Build()
	}

	// work on a sorted copy so the caller's slice is left untouched
	data := langext.ArrCopy(rawdata)
	langext.Sort(data)

	// fractional rank: 0 maps to the first element, 100 to the last
	idxFloat := float64(len(data)-1) * (percentile / float64(100))

	idxInt := int(idxFloat)

	// exact match on index
	if idxFloat == float64(idxInt) {
		return float64(data[idxInt]), nil
	}

	// linear interpolation
	v1 := data[idxInt]
	v2 := data[idxInt+1]

	// weight is the fractional part of the rank, i.e. how far between v1 and v2
	weight := idxFloat - float64(idxInt)

	valFloat := (float64(v1) * (1 - weight)) + (float64(v2) * weight)

	return valFloat, nil

}
|   | ||||
							
								
								
									
										238
									
								
								mathext/statistics_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										238
									
								
								mathext/statistics_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,238 @@ | ||||
| package mathext | ||||
|  | ||||
| import ( | ||||
| 	"math" | ||||
| 	"testing" | ||||
| ) | ||||
|  | ||||
| func TestSumIntsHappyPath(t *testing.T) { | ||||
| 	values := []int{1, 2, 3, 4, 5} | ||||
| 	expected := 15 | ||||
| 	result := Sum(values) | ||||
| 	if result != expected { | ||||
| 		t.Errorf("Sum of %v; expected %v, got %v", values, expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestSumFloatsHappyPath(t *testing.T) { | ||||
| 	values := []float64{1.1, 2.2, 3.3} | ||||
| 	expected := 6.6 | ||||
| 	result := Sum(values) | ||||
| 	if result != expected { | ||||
| 		t.Errorf("Sum of %v; expected %v, got %v", values, expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestMeanOfInts(t *testing.T) { | ||||
| 	values := []float64{1, 2, 3, 4, 5} | ||||
| 	expected := 3.0 | ||||
| 	result := Mean(values) | ||||
| 	if result != expected { | ||||
| 		t.Errorf("Mean of %v; expected %v, got %v", values, expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestMedianOddNumberOfElements(t *testing.T) { | ||||
| 	values := []float64{1, 2, 3, 4, 5} | ||||
| 	expected := 3.0 | ||||
| 	result := Median(values) | ||||
| 	if result != expected { | ||||
| 		t.Errorf("Median of %v; expected %v, got %v", values, expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestMedianEvenNumberOfElements(t *testing.T) { | ||||
| 	values := []float64{1, 2, 3, 4, 5, 6} | ||||
| 	expected := 3.5 | ||||
| 	result := Median(values) | ||||
| 	if result != expected { | ||||
| 		t.Errorf("Median of %v; expected %v, got %v", values, expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestArrMinInts(t *testing.T) { | ||||
| 	values := []int{5, 3, 9, 1, 4} | ||||
| 	expected := 1 | ||||
| 	result := ArrMin(values) | ||||
| 	if result != expected { | ||||
| 		t.Errorf("ArrMin of %v; expected %v, got %v", values, expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestArrMaxInts(t *testing.T) { | ||||
| 	values := []int{5, 3, 9, 1, 4} | ||||
| 	expected := 9 | ||||
| 	result := ArrMax(values) | ||||
| 	if result != expected { | ||||
| 		t.Errorf("ArrMax of %v; expected %v, got %v", values, expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPercentileValidInput(t *testing.T) { | ||||
| 	values := []int{1, 2, 3, 4, 5} | ||||
| 	percentile := 50.0 | ||||
| 	expected := 3 | ||||
| 	result, err := Percentile(values, percentile) | ||||
| 	if err != nil || result != expected { | ||||
| 		t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPercentileOutOfRange(t *testing.T) { | ||||
| 	values := []int{1, 2, 3, 4, 5} | ||||
| 	percentile := 150.0 | ||||
| 	_, err := Percentile(values, percentile) | ||||
| 	if err == nil { | ||||
| 		t.Errorf("Expected error for percentile %v out of range, got nil", percentile) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPercentileValueInArray(t *testing.T) { | ||||
| 	values := []int{1, 3, 5, 7, 9} | ||||
| 	percentile := 40.0 | ||||
| 	expected := 4 | ||||
| 	result, err := Percentile(values, percentile) | ||||
| 	if err != nil || result != expected { | ||||
| 		t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestFloatPercentileValueInArray(t *testing.T) { | ||||
| 	values := []int{1, 3, 5, 7, 9} | ||||
| 	percentile := 40.0 | ||||
| 	expected := 4.2 | ||||
| 	result, err := FloatPercentile(values, percentile) | ||||
| 	if err != nil || result != expected { | ||||
| 		t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPercentileInterpolation(t *testing.T) { | ||||
| 	values := []float64{1.0, 2.0, 3.0, 4.0, 5.0} | ||||
| 	percentile := 25.0 | ||||
| 	expected := 2.0 | ||||
| 	result, err := Percentile(values, percentile) | ||||
| 	if err != nil || result != expected { | ||||
| 		t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPercentileSingleValue(t *testing.T) { | ||||
| 	values := []int{10} | ||||
| 	percentile := 50.0 | ||||
| 	expected := 10 | ||||
| 	result, err := Percentile(values, percentile) | ||||
| 	if err != nil || result != expected { | ||||
| 		t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPercentileExactlyBetweenTwoValues(t *testing.T) { | ||||
| 	values := []float64{1, 2, 3, 4, 5} | ||||
| 	percentile := 62.5 // Exactly between 3 and 4 | ||||
| 	expected := 3.5 | ||||
| 	result, err := FloatPercentile(values, percentile) | ||||
| 	if err != nil || result != expected { | ||||
| 		t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPercentileTwoThirdsBetweenTwoValues(t *testing.T) { | ||||
| 	values := []float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10} | ||||
| 	percentile := 66.666666666666 | ||||
| 	expected := 6.666666666666667 // Since 2/3 of the way between 6 and 7 is 6.666... | ||||
| 	result, err := Percentile(values, percentile) | ||||
| 	if err != nil || math.Abs(result-expected) > 1e-9 { | ||||
| 		t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPercentileBetweenTwoValues1(t *testing.T) { | ||||
| 	values := []float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10} | ||||
| 	percentile := 11.0 | ||||
| 	expected := 1.1 | ||||
| 	result, err := Percentile(values, percentile) | ||||
| 	if err != nil || math.Abs(result-expected) > 1e-9 { | ||||
| 		t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPercentileBetweenTwoValues2(t *testing.T) { | ||||
| 	values := []float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10} | ||||
| 	percentile := 9.0 | ||||
| 	expected := 0.9 | ||||
| 	result, err := Percentile(values, percentile) | ||||
| 	if err != nil || math.Abs(result-expected) > 1e-9 { | ||||
| 		t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPercentileUnsortedInput(t *testing.T) { | ||||
| 	values := []float64{5, 1, 4, 2, 3} // Unsorted input | ||||
| 	percentile := 50.0 | ||||
| 	expected := 3.0 | ||||
| 	result, err := Percentile(values, percentile) | ||||
| 	if err != nil || result != expected { | ||||
| 		t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPercentileUnsortedInputLowPercentile(t *testing.T) { | ||||
| 	values := []float64{10, 6, 7, 3, 2, 9, 8, 1, 4, 5} // Unsorted input | ||||
| 	percentile := 10.0 | ||||
| 	expected := 1.9 // Expecting interpolation between 1 and 2 | ||||
| 	result, err := Percentile(values, percentile) | ||||
| 	if err != nil || result != expected { | ||||
| 		t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPercentileUnsortedInputHighPercentile(t *testing.T) { | ||||
| 	values := []float64{10, 6, 7, 3, 2, 9, 8, 1, 4, 5} // Unsorted input | ||||
| 	percentile := 90.0 | ||||
| 	expected := 9.1 // Expecting interpolation between 9 and 10 | ||||
| 	result, err := Percentile(values, percentile) | ||||
| 	if err != nil || result != expected { | ||||
| 		t.Errorf("Percentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestFloatPercentileExactValueFromInput(t *testing.T) { | ||||
| 	values := []float64{1.5, 2.5, 3.5, 4.5, 5.5} | ||||
| 	percentile := 50.0 // Exact value from input array should be 3.5 | ||||
| 	expected := 3.5 | ||||
| 	result, err := FloatPercentile(values, percentile) | ||||
| 	if err != nil || result != expected { | ||||
| 		t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestFloatPercentileInterpolatedValue(t *testing.T) { | ||||
| 	values := []float64{1.0, 2.0, 3.0, 4.0, 5.0} | ||||
| 	percentile := 87.5 // Interpolated value between 4.0 and 5.0 | ||||
| 	expected := 4.5 | ||||
| 	result, err := FloatPercentile(values, percentile) | ||||
| 	if err != nil || result != expected { | ||||
| 		t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestFloatPercentileUnsortedInputExactValue(t *testing.T) { | ||||
| 	values := []float64{5.5, 1.5, 4.5, 2.5, 3.5} // Unsorted input | ||||
| 	percentile := 50.0 | ||||
| 	expected := 3.5 | ||||
| 	result, err := FloatPercentile(values, percentile) | ||||
| 	if err != nil || result != expected { | ||||
| 		t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestFloatPercentileUnsortedInputInterpolatedValue(t *testing.T) { | ||||
| 	values := []float64{10.5, 6.5, 7.5, 3.5, 2.5, 9.5, 8.5, 1.5, 4.5, 5.5} | ||||
| 	percentile := 80.0 // Interpolated value between 4.0 and 5.0 | ||||
| 	expected := 8.7 | ||||
| 	result, err := FloatPercentile(values, percentile) | ||||
| 	if err != nil || math.Abs(result-expected) > 1e-9 { | ||||
| 		t.Errorf("FloatPercentile %v of %v; expected %v, got %v, err: %v", percentile, values, expected, result, err) | ||||
| 	} | ||||
| } | ||||
| @@ -4,11 +4,13 @@ import ( | ||||
| 	"encoding/json" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"reflect" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| type ConvertStructToMapOpt struct { | ||||
| 	KeepJsonMarshalTypes bool | ||||
| 	MaxDepth             *int | ||||
| 	UseTagsAsKeys        *string | ||||
| } | ||||
|  | ||||
| func ConvertStructToMap(v any, opts ...ConvertStructToMapOpt) map[string]any { | ||||
| @@ -90,7 +92,21 @@ func reflectToMap(fv reflect.Value, depth int, opt ConvertStructToMapOpt) any { | ||||
|  | ||||
| 		for i := 0; i < fv.NumField(); i++ { | ||||
| 			if fv.Type().Field(i).IsExported() { | ||||
| 				res[fv.Type().Field(i).Name] = reflectToMap(fv.Field(i), depth+1, opt) | ||||
|  | ||||
| 				k := fv.Type().Field(i).Name | ||||
| 				if opt.UseTagsAsKeys != nil { | ||||
| 					if tagval, ok := fv.Type().Field(i).Tag.Lookup(*opt.UseTagsAsKeys); ok { | ||||
| 						if strings.Contains(tagval, ",") { | ||||
| 							k = strings.TrimSpace(strings.Split(tagval, ",")[0]) | ||||
| 						} else { | ||||
| 							k = strings.TrimSpace(tagval) | ||||
| 						} | ||||
| 					} else { | ||||
| 						continue | ||||
| 					} | ||||
| 				} | ||||
|  | ||||
| 				res[k] = reflectToMap(fv.Field(i), depth+1, opt) | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
|   | ||||
| @@ -7,6 +7,7 @@ import ( | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"sync" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| type DB interface { | ||||
| @@ -57,89 +58,121 @@ func (db *database) AddListener(listener Listener) { | ||||
|  | ||||
| func (db *database) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Result, error) { | ||||
| 	origsql := sqlstr | ||||
|  | ||||
| 	t0 := time.Now() | ||||
|  | ||||
| 	preMeta := PreExecMeta{Context: ctx, TransactionConstructorContext: nil} | ||||
| 	for _, v := range db.lstr { | ||||
| 		err := v.PreExec(ctx, nil, &sqlstr, &prep) | ||||
| 		err := v.PreExec(ctx, nil, &sqlstr, &prep, preMeta) | ||||
| 		if err != nil { | ||||
| 			return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	t1 := time.Now() | ||||
|  | ||||
| 	res, err := db.db.NamedExecContext(ctx, sqlstr, prep) | ||||
|  | ||||
| 	postMeta := PostExecMeta{Context: ctx, TransactionConstructorContext: nil, Init: t0, Start: t1, End: time.Now()} | ||||
| 	for _, v := range db.lstr { | ||||
| 		v.PostExec(nil, origsql, sqlstr, prep) | ||||
| 		v.PostExec(nil, origsql, sqlstr, prep, err, postMeta) | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return nil, exerr.Wrap(err, "Failed to [exec] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 	} | ||||
|  | ||||
| 	return res, nil | ||||
| } | ||||
|  | ||||
| func (db *database) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Rows, error) { | ||||
| 	origsql := sqlstr | ||||
|  | ||||
| 	t0 := time.Now() | ||||
|  | ||||
| 	preMeta := PreQueryMeta{Context: ctx, TransactionConstructorContext: nil} | ||||
| 	for _, v := range db.lstr { | ||||
| 		err := v.PreQuery(ctx, nil, &sqlstr, &prep) | ||||
| 		err := v.PreQuery(ctx, nil, &sqlstr, &prep, preMeta) | ||||
| 		if err != nil { | ||||
| 			return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	t1 := time.Now() | ||||
|  | ||||
| 	rows, err := sqlx.NamedQueryContext(ctx, db.db, sqlstr, prep) | ||||
|  | ||||
| 	postMeta := PostQueryMeta{Context: ctx, TransactionConstructorContext: nil, Init: t0, Start: t1, End: time.Now()} | ||||
| 	for _, v := range db.lstr { | ||||
| 		v.PostQuery(nil, origsql, sqlstr, prep) | ||||
| 		v.PostQuery(nil, origsql, sqlstr, prep, err, postMeta) | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return nil, exerr.Wrap(err, "Failed to [query] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 	} | ||||
|  | ||||
| 	return rows, nil | ||||
| } | ||||
|  | ||||
| func (db *database) Ping(ctx context.Context) error { | ||||
|  | ||||
| 	t0 := time.Now() | ||||
|  | ||||
| 	preMeta := PrePingMeta{Context: ctx} | ||||
| 	for _, v := range db.lstr { | ||||
| 		err := v.PrePing(ctx) | ||||
| 		err := v.PrePing(ctx, preMeta) | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	t1 := time.Now() | ||||
|  | ||||
| 	err := db.db.PingContext(ctx) | ||||
|  | ||||
| 	postMeta := PostPingMeta{Context: ctx, Init: t0, Start: t1, End: time.Now()} | ||||
| 	for _, v := range db.lstr { | ||||
| 		v.PostPing(err) | ||||
| 		v.PostPing(err, postMeta) | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return exerr.Wrap(err, "Failed to [ping] sql database").Build() | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func (db *database) BeginTransaction(ctx context.Context, iso sql.IsolationLevel) (Tx, error) { | ||||
|  | ||||
| 	t0 := time.Now() | ||||
|  | ||||
| 	db.lock.Lock() | ||||
| 	txid := db.txctr | ||||
| 	db.txctr += 1 // with overflow ! | ||||
| 	db.lock.Unlock() | ||||
|  | ||||
| 	preMeta := PreTxBeginMeta{Context: ctx} | ||||
| 	for _, v := range db.lstr { | ||||
| 		err := v.PreTxBegin(ctx, txid) | ||||
| 		err := v.PreTxBegin(ctx, txid, preMeta) | ||||
| 		if err != nil { | ||||
| 			return nil, err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	t1 := time.Now() | ||||
|  | ||||
| 	xtx, err := db.db.BeginTxx(ctx, &sql.TxOptions{Isolation: iso}) | ||||
|  | ||||
| 	postMeta := PostTxBeginMeta{Context: ctx, Init: t0, Start: t1, End: time.Now()} | ||||
| 	for _, v := range db.lstr { | ||||
| 		v.PostTxBegin(txid, err, postMeta) | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return nil, exerr.Wrap(err, "Failed to start sql transaction").Build() | ||||
| 	} | ||||
|  | ||||
| 	for _, v := range db.lstr { | ||||
| 		v.PostTxBegin(txid, err) | ||||
| 	} | ||||
|  | ||||
| 	return NewTransaction(xtx, txid, db), nil | ||||
| 	return newTransaction(ctx, xtx, txid, db), nil | ||||
| } | ||||
|  | ||||
| func (db *database) Exit() error { | ||||
|   | ||||
							
								
								
									
										214
									
								
								sq/listener.go
									
									
									
									
									
								
							
							
						
						
									
										214
									
								
								sq/listener.go
									
									
									
									
									
								
							| @@ -1,188 +1,266 @@ | ||||
| package sq | ||||
|  | ||||
| import "context" | ||||
| import ( | ||||
| 	"context" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| type PrePingMeta struct { | ||||
| 	Context context.Context | ||||
| } | ||||
|  | ||||
| type PreTxBeginMeta struct { | ||||
| 	Context            context.Context | ||||
| 	ConstructorContext context.Context | ||||
| } | ||||
|  | ||||
| type PreTxCommitMeta struct { | ||||
| 	ConstructorContext context.Context | ||||
| } | ||||
|  | ||||
| type PreTxRollbackMeta struct { | ||||
| 	ConstructorContext context.Context | ||||
| } | ||||
|  | ||||
| type PreQueryMeta struct { | ||||
| 	Context                       context.Context | ||||
| 	TransactionConstructorContext context.Context | ||||
| } | ||||
|  | ||||
| type PreExecMeta struct { | ||||
| 	Context                       context.Context | ||||
| 	TransactionConstructorContext context.Context | ||||
| } | ||||
|  | ||||
| type PostPingMeta struct { | ||||
| 	Context context.Context | ||||
| 	Init    time.Time | ||||
| 	Start   time.Time | ||||
| 	End     time.Time | ||||
| } | ||||
|  | ||||
| type PostTxBeginMeta struct { | ||||
| 	Context context.Context | ||||
| 	Init    time.Time | ||||
| 	Start   time.Time | ||||
| 	End     time.Time | ||||
| } | ||||
|  | ||||
| type PostTxCommitMeta struct { | ||||
| 	ConstructorContext context.Context | ||||
| 	Init               time.Time | ||||
| 	Start              time.Time | ||||
| 	End                time.Time | ||||
| 	ExecCounter        int | ||||
| 	QueryCounter       int | ||||
| } | ||||
|  | ||||
| type PostTxRollbackMeta struct { | ||||
| 	ConstructorContext context.Context | ||||
| 	Init               time.Time | ||||
| 	Start              time.Time | ||||
| 	End                time.Time | ||||
| 	ExecCounter        int | ||||
| 	QueryCounter       int | ||||
| } | ||||
|  | ||||
| type PostQueryMeta struct { | ||||
| 	Context                       context.Context | ||||
| 	TransactionConstructorContext context.Context | ||||
| 	Init                          time.Time | ||||
| 	Start                         time.Time | ||||
| 	End                           time.Time | ||||
| } | ||||
|  | ||||
| type PostExecMeta struct { | ||||
| 	Context                       context.Context | ||||
| 	TransactionConstructorContext context.Context | ||||
| 	Init                          time.Time | ||||
| 	Start                         time.Time | ||||
| 	End                           time.Time | ||||
| } | ||||
|  | ||||
| type Listener interface { | ||||
| 	PrePing(ctx context.Context) error | ||||
| 	PreTxBegin(ctx context.Context, txid uint16) error | ||||
| 	PreTxCommit(txid uint16) error | ||||
| 	PreTxRollback(txid uint16) error | ||||
| 	PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP) error | ||||
| 	PreExec(ctx context.Context, txID *uint16, sql *string, params *PP) error | ||||
| 	PrePing(ctx context.Context, meta PrePingMeta) error | ||||
| 	PreTxBegin(ctx context.Context, txid uint16, meta PreTxBeginMeta) error | ||||
| 	PreTxCommit(txid uint16, meta PreTxCommitMeta) error | ||||
| 	PreTxRollback(txid uint16, meta PreTxRollbackMeta) error | ||||
| 	PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error | ||||
| 	PreExec(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error | ||||
|  | ||||
| 	PostPing(result error) | ||||
| 	PostTxBegin(txid uint16, result error) | ||||
| 	PostTxCommit(txid uint16, result error) | ||||
| 	PostTxRollback(txid uint16, result error) | ||||
| 	PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP) | ||||
| 	PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP) | ||||
| 	PostPing(result error, meta PostPingMeta) | ||||
| 	PostTxBegin(txid uint16, result error, meta PostTxBeginMeta) | ||||
| 	PostTxCommit(txid uint16, result error, meta PostTxCommitMeta) | ||||
| 	PostTxRollback(txid uint16, result error, meta PostTxRollbackMeta) | ||||
| 	PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta) | ||||
| 	PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta) | ||||
| } | ||||
|  | ||||
| type genListener struct { | ||||
| 	prePing        func(ctx context.Context) error | ||||
| 	preTxBegin     func(ctx context.Context, txid uint16) error | ||||
| 	preTxCommit    func(txid uint16) error | ||||
| 	preTxRollback  func(txid uint16) error | ||||
| 	preQuery       func(ctx context.Context, txID *uint16, sql *string, params *PP) error | ||||
| 	preExec        func(ctx context.Context, txID *uint16, sql *string, params *PP) error | ||||
| 	postPing       func(result error) | ||||
| 	postTxBegin    func(txid uint16, result error) | ||||
| 	postTxCommit   func(txid uint16, result error) | ||||
| 	postTxRollback func(txid uint16, result error) | ||||
| 	postQuery      func(txID *uint16, sqlOriginal string, sqlReal string, params PP) | ||||
| 	postExec       func(txID *uint16, sqlOriginal string, sqlReal string, params PP) | ||||
| 	prePing        func(ctx context.Context, meta PrePingMeta) error | ||||
| 	preTxBegin     func(ctx context.Context, txid uint16, meta PreTxBeginMeta) error | ||||
| 	preTxCommit    func(txid uint16, meta PreTxCommitMeta) error | ||||
| 	preTxRollback  func(txid uint16, meta PreTxRollbackMeta) error | ||||
| 	preQuery       func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error | ||||
| 	preExec        func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error | ||||
| 	postPing       func(result error, meta PostPingMeta) | ||||
| 	postTxBegin    func(txid uint16, result error, meta PostTxBeginMeta) | ||||
| 	postTxCommit   func(txid uint16, result error, meta PostTxCommitMeta) | ||||
| 	postTxRollback func(txid uint16, result error, meta PostTxRollbackMeta) | ||||
| 	postQuery      func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta) | ||||
| 	postExec       func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta) | ||||
| } | ||||
|  | ||||
| func (g genListener) PrePing(ctx context.Context) error { | ||||
| func (g genListener) PrePing(ctx context.Context, meta PrePingMeta) error { | ||||
| 	if g.prePing != nil { | ||||
| 		return g.prePing(ctx) | ||||
| 		return g.prePing(ctx, meta) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreTxBegin(ctx context.Context, txid uint16) error { | ||||
| func (g genListener) PreTxBegin(ctx context.Context, txid uint16, meta PreTxBeginMeta) error { | ||||
| 	if g.preTxBegin != nil { | ||||
| 		return g.preTxBegin(ctx, txid) | ||||
| 		return g.preTxBegin(ctx, txid, meta) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreTxCommit(txid uint16) error { | ||||
| func (g genListener) PreTxCommit(txid uint16, meta PreTxCommitMeta) error { | ||||
| 	if g.preTxCommit != nil { | ||||
| 		return g.preTxCommit(txid) | ||||
| 		return g.preTxCommit(txid, meta) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreTxRollback(txid uint16) error { | ||||
| func (g genListener) PreTxRollback(txid uint16, meta PreTxRollbackMeta) error { | ||||
| 	if g.preTxRollback != nil { | ||||
| 		return g.preTxRollback(txid) | ||||
| 		return g.preTxRollback(txid, meta) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP) error { | ||||
| func (g genListener) PreQuery(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error { | ||||
| 	if g.preQuery != nil { | ||||
| 		return g.preQuery(ctx, txID, sql, params) | ||||
| 		return g.preQuery(ctx, txID, sql, params, meta) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PreExec(ctx context.Context, txID *uint16, sql *string, params *PP) error { | ||||
| func (g genListener) PreExec(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error { | ||||
| 	if g.preExec != nil { | ||||
| 		return g.preExec(ctx, txID, sql, params) | ||||
| 		return g.preExec(ctx, txID, sql, params, meta) | ||||
| 	} else { | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostPing(result error) { | ||||
| func (g genListener) PostPing(result error, meta PostPingMeta) { | ||||
| 	if g.postPing != nil { | ||||
| 		g.postPing(result) | ||||
| 		g.postPing(result, meta) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostTxBegin(txid uint16, result error) { | ||||
| func (g genListener) PostTxBegin(txid uint16, result error, meta PostTxBeginMeta) { | ||||
| 	if g.postTxBegin != nil { | ||||
| 		g.postTxBegin(txid, result) | ||||
| 		g.postTxBegin(txid, result, meta) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostTxCommit(txid uint16, result error) { | ||||
| func (g genListener) PostTxCommit(txid uint16, result error, meta PostTxCommitMeta) { | ||||
| 	if g.postTxCommit != nil { | ||||
| 		g.postTxCommit(txid, result) | ||||
| 		g.postTxCommit(txid, result, meta) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostTxRollback(txid uint16, result error) { | ||||
| func (g genListener) PostTxRollback(txid uint16, result error, meta PostTxRollbackMeta) { | ||||
| 	if g.postTxRollback != nil { | ||||
| 		g.postTxRollback(txid, result) | ||||
| 		g.postTxRollback(txid, result, meta) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP) { | ||||
| func (g genListener) PostQuery(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta) { | ||||
| 	if g.postQuery != nil { | ||||
| 		g.postQuery(txID, sqlOriginal, sqlReal, params) | ||||
| 		g.postQuery(txID, sqlOriginal, sqlReal, params, result, meta) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (g genListener) PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP) { | ||||
| func (g genListener) PostExec(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta) { | ||||
| 	if g.postExec != nil { | ||||
| 		g.postExec(txID, sqlOriginal, sqlReal, params) | ||||
| 		g.postExec(txID, sqlOriginal, sqlReal, params, result, meta) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func NewPrePingListener(f func(ctx context.Context) error) Listener { | ||||
| func NewPrePingListener(f func(ctx context.Context, meta PrePingMeta) error) Listener { | ||||
| 	return genListener{prePing: f} | ||||
| } | ||||
|  | ||||
| func NewPreTxBeginListener(f func(ctx context.Context, txid uint16) error) Listener { | ||||
| func NewPreTxBeginListener(f func(ctx context.Context, txid uint16, meta PreTxBeginMeta) error) Listener { | ||||
| 	return genListener{preTxBegin: f} | ||||
| } | ||||
|  | ||||
| func NewPreTxCommitListener(f func(txid uint16) error) Listener { | ||||
| func NewPreTxCommitListener(f func(txid uint16, meta PreTxCommitMeta) error) Listener { | ||||
| 	return genListener{preTxCommit: f} | ||||
| } | ||||
|  | ||||
| func NewPreTxRollbackListener(f func(txid uint16) error) Listener { | ||||
| func NewPreTxRollbackListener(f func(txid uint16, meta PreTxRollbackMeta) error) Listener { | ||||
| 	return genListener{preTxRollback: f} | ||||
| } | ||||
|  | ||||
| func NewPreQueryListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP) error) Listener { | ||||
| func NewPreQueryListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error) Listener { | ||||
| 	return genListener{preQuery: f} | ||||
| } | ||||
|  | ||||
| func NewPreExecListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP) error) Listener { | ||||
| func NewPreExecListener(f func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error) Listener { | ||||
| 	return genListener{preExec: f} | ||||
| } | ||||
|  | ||||
| func NewPreListener(f func(ctx context.Context, cmdtype string, txID *uint16, sql *string, params *PP) error) Listener { | ||||
| 	return genListener{ | ||||
| 		preExec: func(ctx context.Context, txID *uint16, sql *string, params *PP) error { | ||||
| 		preExec: func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreExecMeta) error { | ||||
| 			return f(ctx, "EXEC", txID, sql, params) | ||||
| 		}, | ||||
| 		preQuery: func(ctx context.Context, txID *uint16, sql *string, params *PP) error { | ||||
| 		preQuery: func(ctx context.Context, txID *uint16, sql *string, params *PP, meta PreQueryMeta) error { | ||||
| 			return f(ctx, "QUERY", txID, sql, params) | ||||
| 		}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func NewPostPingListener(f func(result error)) Listener { | ||||
| func NewPostPingListener(f func(result error, meta PostPingMeta)) Listener { | ||||
| 	return genListener{postPing: f} | ||||
| } | ||||
|  | ||||
| func NewPostTxBeginListener(f func(txid uint16, result error)) Listener { | ||||
| func NewPostTxBeginListener(f func(txid uint16, result error, meta PostTxBeginMeta)) Listener { | ||||
| 	return genListener{postTxBegin: f} | ||||
| } | ||||
|  | ||||
| func NewPostTxCommitListener(f func(txid uint16, result error)) Listener { | ||||
| func NewPostTxCommitListener(f func(txid uint16, result error, meta PostTxCommitMeta)) Listener { | ||||
| 	return genListener{postTxCommit: f} | ||||
| } | ||||
|  | ||||
| func NewPostTxRollbackListener(f func(txid uint16, result error)) Listener { | ||||
| func NewPostTxRollbackListener(f func(txid uint16, result error, meta PostTxRollbackMeta)) Listener { | ||||
| 	return genListener{postTxRollback: f} | ||||
| } | ||||
|  | ||||
| func NewPostQueryListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener { | ||||
| func NewPostQueryListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta)) Listener { | ||||
| 	return genListener{postQuery: f} | ||||
| } | ||||
|  | ||||
| func NewPostExecListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener { | ||||
| func NewPostExecListener(f func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta)) Listener { | ||||
| 	return genListener{postExec: f} | ||||
| } | ||||
|  | ||||
| func NewPostListener(f func(cmdtype string, txID *uint16, sqlOriginal string, sqlReal string, params PP)) Listener { | ||||
| func NewPostListener(f func(cmdtype string, txID *uint16, sqlOriginal string, sqlReal string, result error, params PP)) Listener { | ||||
| 	return genListener{ | ||||
| 		postExec: func(txID *uint16, sqlOriginal string, sqlReal string, params PP) { | ||||
| 			f("EXEC", txID, sqlOriginal, sqlReal, params) | ||||
| 		postExec: func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostExecMeta) { | ||||
| 			f("EXEC", txID, sqlOriginal, sqlReal, result, params) | ||||
| 		}, | ||||
| 		postQuery: func(txID *uint16, sqlOriginal string, sqlReal string, params PP) { | ||||
| 			f("QUERY", txID, sqlOriginal, sqlReal, params) | ||||
| 		postQuery: func(txID *uint16, sqlOriginal string, sqlReal string, params PP, result error, meta PostQueryMeta) { | ||||
| 			f("QUERY", txID, sqlOriginal, sqlReal, result, params) | ||||
| 		}, | ||||
| 	} | ||||
| } | ||||
|   | ||||
| @@ -6,6 +6,7 @@ import ( | ||||
| 	"github.com/jmoiron/sqlx" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| type TxStatus string | ||||
| @@ -26,62 +27,78 @@ type Tx interface { | ||||
| } | ||||
|  | ||||
| type transaction struct { | ||||
| 	tx       *sqlx.Tx | ||||
| 	id       uint16 | ||||
| 	status   TxStatus | ||||
| 	execCtr  int | ||||
| 	queryCtr int | ||||
| 	db       *database | ||||
| 	constructorContext context.Context | ||||
| 	tx                 *sqlx.Tx | ||||
| 	id                 uint16 | ||||
| 	status             TxStatus | ||||
| 	execCtr            int | ||||
| 	queryCtr           int | ||||
| 	db                 *database | ||||
| } | ||||
|  | ||||
| func NewTransaction(xtx *sqlx.Tx, txid uint16, db *database) Tx { | ||||
| func newTransaction(ctx context.Context, xtx *sqlx.Tx, txid uint16, db *database) Tx { | ||||
| 	return &transaction{ | ||||
| 		tx:       xtx, | ||||
| 		id:       txid, | ||||
| 		status:   TxStatusInitial, | ||||
| 		execCtr:  0, | ||||
| 		queryCtr: 0, | ||||
| 		db:       db, | ||||
| 		constructorContext: ctx, | ||||
| 		tx:                 xtx, | ||||
| 		id:                 txid, | ||||
| 		status:             TxStatusInitial, | ||||
| 		execCtr:            0, | ||||
| 		queryCtr:           0, | ||||
| 		db:                 db, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (tx *transaction) Rollback() error { | ||||
|  | ||||
| 	t0 := time.Now() | ||||
|  | ||||
| 	preMeta := PreTxRollbackMeta{ConstructorContext: tx.constructorContext} | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		err := v.PreTxRollback(tx.id) | ||||
| 		err := v.PreTxRollback(tx.id, preMeta) | ||||
| 		if err != nil { | ||||
| 			return exerr.Wrap(err, "failed to call SQL pre-rollback listener").Int("tx.id", int(tx.id)).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	t1 := time.Now() | ||||
|  | ||||
| 	result := tx.tx.Rollback() | ||||
|  | ||||
| 	if result == nil { | ||||
| 		tx.status = TxStatusRollback | ||||
| 	} | ||||
|  | ||||
| 	postMeta := PostTxRollbackMeta{ConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now(), ExecCounter: tx.execCtr, QueryCounter: tx.queryCtr} | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		v.PostTxRollback(tx.id, result) | ||||
| 		v.PostTxRollback(tx.id, result, postMeta) | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| func (tx *transaction) Commit() error { | ||||
|  | ||||
| 	t0 := time.Now() | ||||
|  | ||||
| 	preMeta := PreTxCommitMeta{ConstructorContext: tx.constructorContext} | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		err := v.PreTxCommit(tx.id) | ||||
| 		err := v.PreTxCommit(tx.id, preMeta) | ||||
| 		if err != nil { | ||||
| 			return exerr.Wrap(err, "failed to call SQL pre-commit listener").Int("tx.id", int(tx.id)).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	t1 := time.Now() | ||||
|  | ||||
| 	result := tx.tx.Commit() | ||||
|  | ||||
| 	if result == nil { | ||||
| 		tx.status = TxStatusComitted | ||||
| 	} | ||||
|  | ||||
| 	postMeta := PostTxCommitMeta{ConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now(), ExecCounter: tx.execCtr, QueryCounter: tx.queryCtr} | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		v.PostTxRollback(tx.id, result) | ||||
| 		v.PostTxCommit(tx.id, result, postMeta) | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| @@ -89,21 +106,29 @@ func (tx *transaction) Commit() error { | ||||
|  | ||||
| func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Result, error) { | ||||
| 	origsql := sqlstr | ||||
|  | ||||
| 	t0 := time.Now() | ||||
|  | ||||
| 	preMeta := PreExecMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext} | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		err := v.PreExec(ctx, langext.Ptr(tx.id), &sqlstr, &prep) | ||||
| 		err := v.PreExec(ctx, langext.Ptr(tx.id), &sqlstr, &prep, preMeta) | ||||
| 		if err != nil { | ||||
| 			return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	t1 := time.Now() | ||||
|  | ||||
| 	res, err := tx.tx.NamedExecContext(ctx, sqlstr, prep) | ||||
| 	tx.execCtr++ | ||||
|  | ||||
| 	if tx.status == TxStatusInitial && err == nil { | ||||
| 		tx.status = TxStatusActive | ||||
| 	} | ||||
|  | ||||
| 	postMeta := PostExecMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now()} | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		v.PostExec(langext.Ptr(tx.id), origsql, sqlstr, prep) | ||||
| 		v.PostExec(langext.Ptr(tx.id), origsql, sqlstr, prep, err, postMeta) | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| @@ -114,21 +139,29 @@ func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Re | ||||
|  | ||||
| func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Rows, error) { | ||||
| 	origsql := sqlstr | ||||
|  | ||||
| 	t0 := time.Now() | ||||
|  | ||||
| 	preMeta := PreQueryMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext} | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		err := v.PreQuery(ctx, langext.Ptr(tx.id), &sqlstr, &prep) | ||||
| 		err := v.PreQuery(ctx, langext.Ptr(tx.id), &sqlstr, &prep, preMeta) | ||||
| 		if err != nil { | ||||
| 			return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	t1 := time.Now() | ||||
|  | ||||
| 	rows, err := sqlx.NamedQueryContext(ctx, tx.tx, sqlstr, prep) | ||||
| 	tx.queryCtr++ | ||||
|  | ||||
| 	if tx.status == TxStatusInitial && err == nil { | ||||
| 		tx.status = TxStatusActive | ||||
| 	} | ||||
|  | ||||
| 	postMeta := PostQueryMeta{Context: ctx, TransactionConstructorContext: tx.constructorContext, Init: t0, Start: t1, End: time.Now()} | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		v.PostQuery(langext.Ptr(tx.id), origsql, sqlstr, prep) | ||||
| 		v.PostQuery(langext.Ptr(tx.id), origsql, sqlstr, prep, err, postMeta) | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
|   | ||||
							
								
								
									
										44
									
								
								timeext/diff.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										44
									
								
								timeext/diff.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,44 @@ | ||||
| package timeext | ||||
|  | ||||
| import "time" | ||||
|  | ||||
| // YearDifference calculates the difference between two timestamps in years. | ||||
| // = t1 - t2 | ||||
| // returns a float value | ||||
| func YearDifference(t1 time.Time, t2 time.Time, tz *time.Location) float64 { | ||||
|  | ||||
| 	yDelta := float64(t1.Year() - t2.Year()) | ||||
|  | ||||
| 	processT1 := float64(t1.Sub(TimeToYearStart(t1, tz))) / float64(TimeToYearEnd(t1, tz).Sub(TimeToYearStart(t1, tz))) | ||||
| 	processT2 := float64(t2.Sub(TimeToYearStart(t2, tz))) / float64(TimeToYearEnd(t2, tz).Sub(TimeToYearStart(t2, tz))) | ||||
|  | ||||
| 	return yDelta + (processT1 - processT2) | ||||
| } | ||||
|  | ||||
| // MonthDifference calculates the difference between two timestamps in months. | ||||
| // = t1 - t2 | ||||
| // returns a float value | ||||
| func MonthDifference(t1 time.Time, t2 time.Time) float64 { | ||||
|  | ||||
| 	yDelta := float64(t1.Year() - t2.Year()) | ||||
| 	mDelta := float64(t1.Month() - t2.Month()) | ||||
|  | ||||
| 	dDelta := float64(0) | ||||
|  | ||||
| 	t1MonthDays := DaysInMonth(t1) | ||||
| 	t2MonthDays := DaysInMonth(t2) | ||||
|  | ||||
| 	if t2.Year() > t1.Year() || (t2.Year() == t1.Year() && t2.Month() > t1.Month()) { | ||||
| 		dDelta -= 1 | ||||
| 		dDelta += float64(t1MonthDays-t1.Day()) / float64(t1MonthDays) | ||||
| 		dDelta += float64(t2.Day()) / float64(t2MonthDays) | ||||
| 	} else if t2.Year() < t1.Year() || (t2.Year() == t1.Year() && t2.Month() < t1.Month()) { | ||||
| 		dDelta -= 1 | ||||
| 		dDelta += float64(t1.Day()) / float64(t1MonthDays) | ||||
| 		dDelta += float64(t2MonthDays-t2.Day()) / float64(t2MonthDays) | ||||
| 	} else { | ||||
| 		dDelta += float64(t1.Day()-t2.Day()) / float64(t1MonthDays) | ||||
| 	} | ||||
|  | ||||
| 	return yDelta*12 + mDelta + dDelta | ||||
| } | ||||
							
								
								
									
										143
									
								
								timeext/diff_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										143
									
								
								timeext/diff_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,143 @@ | ||||
| package timeext | ||||
|  | ||||
| import ( | ||||
| 	"math" | ||||
| 	"testing" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| func TestYearDifferenceWithSameYearAndDay(t *testing.T) { | ||||
| 	t1 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	t2 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	expected := 0.0 | ||||
| 	result := YearDifference(t1, t2, time.UTC) | ||||
| 	if !epsilonEquals(result, expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestYearDifferenceWithOneYearApart(t *testing.T) { | ||||
| 	t1 := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	t2 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	expected := 1.0 | ||||
| 	result := YearDifference(t1, t2, time.UTC) | ||||
| 	if !epsilonEquals(result, expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestYearDifferenceWithDifferentMonths(t *testing.T) { | ||||
| 	t1 := time.Date(2020, 6, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	t2 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	expected := 0.4166666666666667 // Approximation of 5/12 months | ||||
| 	result := YearDifference(t1, t2, time.UTC) | ||||
| 	if !epsilonEquals(result, expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestYearDifferenceAcrossYears(t *testing.T) { | ||||
| 	t1 := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	t2 := time.Date(2020, 6, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	expected := 0.5833333333333334 // Approximation of 7/12 months | ||||
| 	result := YearDifference(t1, t2, time.UTC) | ||||
| 	if !epsilonEquals(result, expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestYearDifferenceWithTimezone(t *testing.T) { | ||||
| 	tz, _ := time.LoadLocation("America/New_York") | ||||
| 	t1 := time.Date(2021, 1, 1, 0, 0, 0, 0, tz) | ||||
| 	t2 := time.Date(2020, 6, 1, 0, 0, 0, 0, tz) | ||||
| 	expected := 0.5833333333333334 // Same as UTC but ensuring timezone is considered | ||||
| 	result := YearDifference(t1, t2, tz) | ||||
| 	if !epsilonEquals(result, expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestYearDifferenceWithNegativeDifference(t *testing.T) { | ||||
| 	t1 := time.Date(2020, 1, 1, 0, 0, 0, 0, TimezoneBerlin) | ||||
| 	t2 := time.Date(2021, 1, 1, 0, 0, 0, 0, TimezoneBerlin) | ||||
| 	expected := -1.0 | ||||
| 	result := YearDifference(t1, t2, TimezoneBerlin) | ||||
| 	if !epsilonEquals(result, expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestYearDifferenceWithNegativeDifference2(t *testing.T) { | ||||
| 	t1 := time.Date(2020, 7, 1, 0, 0, 0, 0, TimezoneBerlin) | ||||
| 	t2 := time.Date(2021, 7, 1, 0, 0, 0, 0, TimezoneBerlin) | ||||
| 	expected := -1.0 | ||||
| 	result := YearDifference(t1, t2, TimezoneBerlin) | ||||
| 	if !epsilonEquals(result, expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
// epsilonEquals reports whether a and b differ by less than a fixed
// tolerance of 0.01.
func epsilonEquals(a, b float64) bool {
	const tolerance = 0.01
	return math.Abs(a-b) < tolerance
}
|  | ||||
| func TestMonthDifferenceSameDate(t *testing.T) { | ||||
| 	t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	t2 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	expected := 0.0 | ||||
| 	result := MonthDifference(t2, t1) | ||||
| 	if !epsilonEquals(result, expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestMonthDifferenceSameMonth(t *testing.T) { | ||||
| 	t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	t2 := time.Date(2022, 1, 31, 0, 0, 0, 0, time.UTC) | ||||
| 	expected := 0.967741935483871 // Approximation of 30/31 days | ||||
| 	result := MonthDifference(t2, t1) | ||||
| 	if !epsilonEquals(result, expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestMonthDifferenceDifferentMonthsSameYear(t *testing.T) { | ||||
| 	t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	t2 := time.Date(2022, 3, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	expected := 2.0 | ||||
| 	result := MonthDifference(t2, t1) | ||||
| 	if !epsilonEquals(result, expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestMonthDifferenceDifferentYears(t *testing.T) { | ||||
| 	t1 := time.Date(2021, 12, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	t2 := time.Date(2022, 2, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	expected := 2.0 | ||||
| 	result := MonthDifference(t2, t1) | ||||
| 	if !epsilonEquals(result, expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestMonthDifferenceT1BeforeT2(t *testing.T) { | ||||
| 	t1 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	t2 := time.Date(2022, 6, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	expected := 5.0 | ||||
| 	result := MonthDifference(t2, t1) | ||||
| 	if !epsilonEquals(result, expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestMonthDifferenceT1AfterT2(t *testing.T) { | ||||
| 	t1 := time.Date(2022, 6, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	t2 := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC) | ||||
| 	expected := -5.0 | ||||
| 	result := MonthDifference(t2, t1) | ||||
| 	if !epsilonEquals(result, expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
| @@ -65,6 +65,10 @@ func TimeToYearEnd(t time.Time, tz *time.Location) time.Time { | ||||
| 	return TimeToYearStart(t, tz).AddDate(1, 0, 0).Add(-1) | ||||
| } | ||||
|  | ||||
| func TimeToNextYearStart(t time.Time, tz *time.Location) time.Time { | ||||
| 	return TimeToYearStart(t, tz).AddDate(1, 0, 0) | ||||
| } | ||||
|  | ||||
| // IsSameDayIncludingDateBoundaries returns true if t1 and t2 are part of the same day (TZ/Berlin), the boundaries of the day are | ||||
| // inclusive, this means 2021-09-15T00:00:00 is still part of the day 2021-09-14 | ||||
| func IsSameDayIncludingDateBoundaries(t1 time.Time, t2 time.Time, tz *time.Location) bool { | ||||
| @@ -180,3 +184,10 @@ func AddYears(t time.Time, yearCount float64, tz *time.Location) time.Time { | ||||
|  | ||||
| 	return t.Add(time.Duration(float64(t1.Sub(t0)) * floatCount)) | ||||
| } | ||||
|  | ||||
| func DaysInMonth(t time.Time) int { | ||||
| 	// https://stackoverflow.com/a/73882035/1761622 | ||||
|  | ||||
| 	y, m, _ := t.Date() | ||||
| 	return time.Date(y, m+1, 0, 0, 0, 0, 0, time.UTC).Day() | ||||
| } | ||||
|   | ||||
| @@ -72,6 +72,13 @@ func TestIsSunday(t *testing.T) { | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestIsSunday_OnSunday(t *testing.T) { | ||||
| 	sunday := time.Date(2022, 5, 15, 0, 0, 0, 0, TimezoneBerlin) // A Sunday | ||||
| 	if !IsSunday(sunday, TimezoneBerlin) { | ||||
| 		t.Errorf("Expected true for Sunday") | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestDurationFromTime(t *testing.T) { | ||||
| 	expected := time.Duration(13*time.Hour + 14*time.Minute + 15*time.Second) | ||||
| 	result := DurationFromTime(13, 14, 15) | ||||
| @@ -156,3 +163,67 @@ func TestAddYears(t *testing.T) { | ||||
| 		t.Errorf("Expected %v but got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestIsDatePartEqual_SameDateDifferentTimes(t *testing.T) { | ||||
| 	tz := time.UTC | ||||
| 	t1 := time.Date(2022, 5, 18, 10, 30, 0, 0, tz) | ||||
| 	t2 := time.Date(2022, 5, 18, 20, 45, 0, 0, tz) | ||||
| 	if !IsDatePartEqual(t1, t2, tz) { | ||||
| 		t.Errorf("Expected dates to be equal") | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestWithTimePart_ChangeTime(t *testing.T) { | ||||
| 	base := time.Date(2022, 5, 18, 0, 0, 0, 0, time.UTC) | ||||
| 	result := WithTimePart(base, 15, 30, 45) | ||||
| 	expected := time.Date(2022, 5, 18, 15, 30, 45, 0, time.UTC) | ||||
| 	if !result.Equal(expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestCombineDateAndTime_CombineDifferentParts(t *testing.T) { | ||||
| 	date := time.Date(2022, 5, 18, 0, 0, 0, 0, time.UTC) | ||||
| 	timePart := time.Date(2000, 1, 1, 15, 30, 45, 0, time.UTC) | ||||
| 	result := CombineDateAndTime(date, timePart) | ||||
| 	expected := time.Date(2022, 5, 18, 15, 30, 45, 0, time.UTC) | ||||
| 	if !result.Equal(expected) { | ||||
| 		t.Errorf("Expected %v, got %v", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestDaysInMonth_31Days(t *testing.T) { | ||||
| 	date := time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC) // January | ||||
| 	expected := 31 | ||||
| 	result := DaysInMonth(date) | ||||
| 	if result != expected { | ||||
| 		t.Errorf("Expected %d but got %d", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestDaysInMonth_30Days(t *testing.T) { | ||||
| 	date := time.Date(2022, 4, 1, 0, 0, 0, 0, time.UTC) // April | ||||
| 	expected := 30 | ||||
| 	result := DaysInMonth(date) | ||||
| 	if result != expected { | ||||
| 		t.Errorf("Expected %d but got %d", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestDaysInMonth_FebruaryLeapYear(t *testing.T) { | ||||
| 	date := time.Date(2020, 2, 1, 0, 0, 0, 0, time.UTC) // February in a leap year | ||||
| 	expected := 29 | ||||
| 	result := DaysInMonth(date) | ||||
| 	if result != expected { | ||||
| 		t.Errorf("Expected %d but got %d", expected, result) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestDaysInMonth_FebruaryNonLeapYear(t *testing.T) { | ||||
| 	date := time.Date(2021, 2, 1, 0, 0, 0, 0, time.UTC) // February in a non-leap year | ||||
| 	expected := 28 | ||||
| 	result := DaysInMonth(date) | ||||
| 	if result != expected { | ||||
| 		t.Errorf("Expected %d but got %d", expected, result) | ||||
| 	} | ||||
| } | ||||
|   | ||||
| @@ -120,25 +120,25 @@ func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirecti | ||||
|  | ||||
| 	valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary) | ||||
| 	if err != nil { | ||||
| 		return ct.CursorToken{}, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build() | ||||
| 		return nil, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build() | ||||
| 	} | ||||
|  | ||||
| 	valueSeconary := "" | ||||
| 	if fieldSecondary != nil && dirSecondary != nil { | ||||
| 		valueSeconary, err = c.getFieldValueAsTokenString(lastEntity, *fieldSecondary) | ||||
| 		if err != nil { | ||||
| 			return ct.CursorToken{}, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build() | ||||
| 			return nil, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return ct.CursorToken{ | ||||
| 		Mode:           ct.CTMNormal, | ||||
| 		ValuePrimary:   valuePrimary, | ||||
| 		ValueSecondary: valueSeconary, | ||||
| 		Direction:      dirPrimary, | ||||
| 		PageSize:       langext.Coalesce(pageSize, 0), | ||||
| 		Extra:          ct.Extra{}, | ||||
| 	}, nil | ||||
| 	return ct.NewKeySortToken( | ||||
| 		valuePrimary, | ||||
| 		valueSeconary, | ||||
| 		dirPrimary, | ||||
| 		dirPrimary, | ||||
| 		langext.Coalesce(pageSize, 0), | ||||
| 		ct.Extra{}, | ||||
| 	), nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) needsDoubleSort(ctx context.Context) bool { | ||||
|   | ||||
| @@ -7,9 +7,10 @@ import ( | ||||
| 	"go.mongodb.org/mongo-driver/mongo/options" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"iter" | ||||
| ) | ||||
|  | ||||
| func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) { | ||||
| func (c *Coll[TData]) createFindQuery(ctx context.Context, filter bson.M, opts ...*options.FindOptions) (*mongo.Cursor, error) { | ||||
|  | ||||
| 	pipeline := mongo.Pipeline{} | ||||
| 	pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}}) | ||||
| @@ -64,6 +65,16 @@ func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options. | ||||
| 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||
| 	} | ||||
|  | ||||
| 	return cursor, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) { | ||||
|  | ||||
| 	cursor, err := c.createFindQuery(ctx, filter, opts...) | ||||
| 	if err != nil { | ||||
| 		return nil, exerr.Wrap(err, "").Build() | ||||
| 	} | ||||
|  | ||||
| 	defer func() { _ = cursor.Close(ctx) }() | ||||
|  | ||||
| 	res, err := c.decodeAll(ctx, cursor) | ||||
| @@ -74,6 +85,57 @@ func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options. | ||||
| 	return res, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) FindIterateFunc(ctx context.Context, filter bson.M, fn func(v TData) error, opts ...*options.FindOptions) error { | ||||
|  | ||||
| 	cursor, err := c.createFindQuery(ctx, filter, opts...) | ||||
| 	if err != nil { | ||||
| 		return exerr.Wrap(err, "").Build() | ||||
| 	} | ||||
|  | ||||
| 	defer func() { _ = cursor.Close(ctx) }() | ||||
|  | ||||
| 	for cursor.Next(ctx) { | ||||
|  | ||||
| 		v, err := c.decodeSingle(ctx, cursor) | ||||
| 		if err != nil { | ||||
| 			return exerr.Wrap(err, "").Build() | ||||
| 		} | ||||
|  | ||||
| 		err = fn(v) | ||||
| 		if err != nil { | ||||
| 			return exerr.Wrap(err, "").Build() | ||||
| 		} | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) FindIterate(ctx context.Context, filter bson.M, opts ...*options.FindOptions) iter.Seq2[TData, error] { | ||||
| 	cursor, err := c.createFindQuery(ctx, filter, opts...) | ||||
| 	if err != nil { | ||||
| 		return langext.IterSingleValueSeq2[TData, error](*new(TData), exerr.Wrap(err, "").Build()) | ||||
| 	} | ||||
|  | ||||
| 	return func(yield func(TData, error) bool) { | ||||
| 		defer func() { _ = cursor.Close(ctx) }() | ||||
|  | ||||
| 		for cursor.Next(ctx) { | ||||
| 			v, err := c.decodeSingle(ctx, cursor) | ||||
| 			if err != nil { | ||||
| 				if !yield(*new(TData), err) { | ||||
| 					return | ||||
| 				} | ||||
| 				continue | ||||
| 			} | ||||
|  | ||||
| 			if !yield(v, nil) { | ||||
| 				return | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // converts FindOptions to AggregateOptions | ||||
| func convertFindOpt(v *options.FindOptions) (*options.AggregateOptions, error) { | ||||
| 	if v == nil { | ||||
|   | ||||
| @@ -12,7 +12,11 @@ import ( | ||||
| func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) { | ||||
| 	r, err := c.findOneInternal(ctx, filter, false) | ||||
| 	if err != nil { | ||||
| 		return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Build() | ||||
| 		if filterId, ok := filter["_id"]; ok { | ||||
| 			return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Any("filter", filter).Any("filter_id", filterId).Build() | ||||
| 		} else { | ||||
| 			return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Any("filter", filter).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return *r, nil | ||||
| @@ -21,7 +25,7 @@ func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) | ||||
| func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) { | ||||
| 	r, err := c.findOneInternal(ctx, filter, true) | ||||
| 	if err != nil { | ||||
| 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Str("collection", c.Name()).Build() | ||||
| 		return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Str("collection", c.Name()).Any("filter", filter).Build() | ||||
| 	} | ||||
|  | ||||
| 	return r, nil | ||||
| @@ -58,7 +62,11 @@ func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowN | ||||
| 			return nil, nil | ||||
| 		} | ||||
| 		if err != nil { | ||||
| 			return nil, exerr.Wrap(err, "mongo-query[find-one] failed").Any("filter", filter).Str("collection", c.Name()).NoLog().Build() | ||||
| 			if filterId, ok := filter["_id"]; ok { | ||||
| 				return nil, exerr.Wrap(err, "mongo-query[find-one|internal] failed").Str("collection", c.Name()).Any("filter", filter).Any("filter_id", filterId).NoLog().Build() | ||||
| 			} else { | ||||
| 				return nil, exerr.Wrap(err, "mongo-query[find-one|internal] failed").Str("collection", c.Name()).Any("filter", filter).NoLog().Build() | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		return &res, nil | ||||
|   | ||||
							
								
								
									
										406
									
								
								wmo/queryList.go
									
									
									
									
									
								
							
							
						
						
									
										406
									
								
								wmo/queryList.go
									
									
									
									
									
								
							| @@ -7,103 +7,103 @@ import ( | ||||
| 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"iter" | ||||
| ) | ||||
|  | ||||
| func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) { | ||||
| 	if inTok.Mode == ct.CTMEnd { | ||||
| 		return make([]TData, 0), ct.End(), nil | ||||
| 	if inTok == nil { | ||||
| 		inTok = ct.Start() | ||||
| 	} | ||||
|  | ||||
| 	if pageSize != nil && *pageSize == 0 { | ||||
| 		return make([]TData, 0), inTok, nil // fast track, we return an empty list and do not advance the cursor token | ||||
| 	if ctks, ok := inTok.(ct.CTKeySort); ok { | ||||
| 		d, tok, err := c.listWithKSToken(ctx, filter, pageSize, ctks) | ||||
| 		if err != nil { | ||||
| 			return nil, ct.End(), err | ||||
| 		} | ||||
| 		return d, tok, nil | ||||
| 	} else if ctpag, ok := inTok.(ct.CTPaginated); ok { | ||||
| 		d, tok, err := c.listWithPaginatedToken(ctx, filter, pageSize, ctpag) | ||||
| 		if err != nil { | ||||
| 			return nil, ct.End(), err | ||||
| 		} | ||||
| 		return d, tok, nil | ||||
| 	} else { | ||||
| 		return nil, ct.End(), exerr.New(exerr.TypeCursorTokenDecode, "unknown ct type").Any("token", inTok).Type("tokenType", inTok).Build() | ||||
| 	} | ||||
| } | ||||
|  | ||||
| 	pipeline := mongo.Pipeline{} | ||||
| 	pf1 := "_id" | ||||
| 	pd1 := ct.SortASC | ||||
| 	pf2 := "_id" | ||||
| 	pd2 := ct.SortASC | ||||
| func (c *Coll[TData]) ListIterateFunc(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken, fn func(v TData) error) error { | ||||
| 	var cursor *mongo.Cursor | ||||
| 	var err error | ||||
|  | ||||
| 	if filter != nil { | ||||
| 		pipeline = filter.FilterQuery(ctx) | ||||
| 		pf1, pd1, pf2, pd2 = filter.Pagination(ctx) | ||||
| 	} | ||||
|  | ||||
| 	sortPrimary := pf1 | ||||
| 	sortDirPrimary := pd1 | ||||
| 	sortSecondary := &pf2 | ||||
| 	sortDirSecondary := &pd2 | ||||
|  | ||||
| 	if pf1 == pf2 { | ||||
| 		sortSecondary = nil | ||||
| 		sortDirSecondary = nil | ||||
| 	} | ||||
|  | ||||
| 	paginationPipeline, doubleSortPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | ||||
| 	if err != nil { | ||||
| 		return nil, ct.CursorToken{}, exerr. | ||||
| 			Wrap(err, "failed to create pagination"). | ||||
| 			WithType(exerr.TypeCursorTokenDecode). | ||||
| 			Str("collection", c.Name()). | ||||
| 			Any("inTok", inTok). | ||||
| 			Any("sortPrimary", sortPrimary). | ||||
| 			Any("sortDirPrimary", sortDirPrimary). | ||||
| 			Any("sortSecondary", sortSecondary). | ||||
| 			Any("sortDirSecondary", sortDirSecondary). | ||||
| 			Any("pageSize", pageSize). | ||||
| 			Build() | ||||
| 	} | ||||
|  | ||||
| 	pipeline = append(pipeline, paginationPipeline...) | ||||
|  | ||||
| 	for _, ppl := range c.extraModPipeline { | ||||
| 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||
| 	} | ||||
|  | ||||
| 	if c.needsDoubleSort(ctx) { | ||||
| 		pipeline = langext.ArrConcat(pipeline, doubleSortPipeline) | ||||
| 	} | ||||
|  | ||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||
| 	if err != nil { | ||||
| 		return nil, ct.CursorToken{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||
| 	if ctks, ok := inTok.(ct.CTKeySort); ok { | ||||
| 		_, _, _, _, cursor, err = c.createKSListQuery(ctx, filter, pageSize, ctks) | ||||
| 		if err != nil { | ||||
| 			return exerr.Wrap(err, "").Build() | ||||
| 		} | ||||
| 	} else if ctpag, ok := inTok.(ct.CTPaginated); ok { | ||||
| 		_, cursor, err = c.createPaginatedListQuery(ctx, filter, pageSize, ctpag) | ||||
| 		if err != nil { | ||||
| 			return exerr.Wrap(err, "").Build() | ||||
| 		} | ||||
| 	} else { | ||||
| 		return exerr.New(exerr.TypeCursorTokenDecode, "unknown ct type").Any("token", inTok).Type("tokenType", inTok).Build() | ||||
| 	} | ||||
|  | ||||
| 	defer func() { _ = cursor.Close(ctx) }() | ||||
|  | ||||
| 	// fast branch | ||||
| 	if pageSize == nil { | ||||
| 		entries, err := c.decodeAll(ctx, cursor) | ||||
| 	for cursor.Next(ctx) { | ||||
|  | ||||
| 		v, err := c.decodeSingle(ctx, cursor) | ||||
| 		if err != nil { | ||||
| 			return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to all-decode entities").Build() | ||||
| 			return exerr.Wrap(err, "").Build() | ||||
| 		} | ||||
| 		return entries, ct.End(), nil | ||||
| 	} | ||||
|  | ||||
| 	entities := make([]TData, 0, cursor.RemainingBatchLength()) | ||||
| 	for (pageSize == nil || len(entities) != *pageSize) && cursor.Next(ctx) { | ||||
| 		var entry TData | ||||
| 		entry, err = c.decodeSingle(ctx, cursor) | ||||
| 		err = fn(v) | ||||
| 		if err != nil { | ||||
| 			return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to decode entity").Build() | ||||
| 			return exerr.Wrap(err, "").Build() | ||||
| 		} | ||||
| 		entities = append(entities, entry) | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	if pageSize == nil || len(entities) < *pageSize || !cursor.TryNext(ctx) { | ||||
| 		return entities, ct.End(), nil | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) ListIterate(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) iter.Seq2[TData, error] { | ||||
| 	var cursor *mongo.Cursor | ||||
| 	var err error | ||||
|  | ||||
| 	if ctks, ok := inTok.(ct.CTKeySort); ok { | ||||
| 		_, _, _, _, cursor, err = c.createKSListQuery(ctx, filter, pageSize, ctks) | ||||
| 		if err != nil { | ||||
| 			return langext.IterSingleValueSeq2[TData, error](*new(TData), exerr.Wrap(err, "").Build()) | ||||
| 		} | ||||
| 	} else if ctpag, ok := inTok.(ct.CTPaginated); ok { | ||||
| 		_, cursor, err = c.createPaginatedListQuery(ctx, filter, pageSize, ctpag) | ||||
| 		if err != nil { | ||||
| 			return langext.IterSingleValueSeq2[TData, error](*new(TData), exerr.Wrap(err, "").Build()) | ||||
| 		} | ||||
| 	} else { | ||||
| 		return langext.IterSingleValueSeq2[TData, error](*new(TData), exerr.New(exerr.TypeCursorTokenDecode, "unknown ct type").Any("token", inTok).Type("tokenType", inTok).Build()) | ||||
| 	} | ||||
|  | ||||
| 	last := entities[len(entities)-1] | ||||
| 	return func(yield func(TData, error) bool) { | ||||
| 		defer func() { _ = cursor.Close(ctx) }() | ||||
|  | ||||
| 	c.EnsureInitializedReflection(last) | ||||
| 		for cursor.Next(ctx) { | ||||
| 			v, err := c.decodeSingle(ctx, cursor) | ||||
| 			if err != nil { | ||||
| 				if !yield(*new(TData), err) { | ||||
| 					return | ||||
| 				} | ||||
| 				continue | ||||
| 			} | ||||
|  | ||||
| 	nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize) | ||||
| 	if err != nil { | ||||
| 		return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to create (out)-token").Build() | ||||
| 			if !yield(v, nil) { | ||||
| 				return | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return entities, nextToken, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, error) { | ||||
| @@ -135,17 +135,44 @@ func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, er | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, int64, error) { | ||||
| 	// NOTE: Possible optimization: Cache count in CursorToken, then fetch count only on first page. | ||||
| 	count, err := c.Count(ctx, filter) | ||||
| 	if err != nil { | ||||
| 		return nil, ct.CursorToken{}, 0, err | ||||
| 	} | ||||
|  | ||||
| 	data, token, err := c.List(ctx, filter, pageSize, inTok) | ||||
| 	if err != nil { | ||||
| 		return nil, ct.CursorToken{}, 0, err | ||||
| 	if pageSize != nil && *pageSize == 0 { | ||||
|  | ||||
| 		// fast track, we return an empty list and do not advance the cursor token | ||||
|  | ||||
| 		count, err := c.Count(ctx, filter) | ||||
| 		if err != nil { | ||||
| 			return nil, nil, 0, err | ||||
| 		} | ||||
|  | ||||
| 		return make([]TData, 0), inTok, count, nil | ||||
|  | ||||
| 	} else if pageSize == nil && inTok.IsStart() { | ||||
|  | ||||
| 		// fast track, we simply return len(entries) for count (we query all anyway) | ||||
|  | ||||
| 		data, token, err := c.List(ctx, filter, pageSize, inTok) | ||||
| 		if err != nil { | ||||
| 			return nil, nil, 0, err | ||||
| 		} | ||||
|  | ||||
| 		return data, token, int64(len(data)), nil | ||||
|  | ||||
| 	} else { | ||||
|  | ||||
| 		count, err := c.Count(ctx, filter) | ||||
| 		if err != nil { | ||||
| 			return nil, nil, 0, err | ||||
| 		} | ||||
|  | ||||
| 		data, token, err := c.List(ctx, filter, pageSize, inTok) | ||||
| 		if err != nil { | ||||
| 			return nil, nil, 0, err | ||||
| 		} | ||||
|  | ||||
| 		return data, token, count, nil | ||||
|  | ||||
| 	} | ||||
| 	return data, token, count, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) ListAllIDs(ctx context.Context, filter ct.RawFilter) ([]string, error) { | ||||
| @@ -184,7 +211,186 @@ func (c *Coll[TData]) ListAllIDs(ctx context.Context, filter ct.RawFilter) ([]st | ||||
| 	return langext.ArrMap(res, func(v idObject) string { return v.ID }), nil | ||||
| } | ||||
|  | ||||
| func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) { | ||||
| // ===================================================================================================================== | ||||
|  | ||||
| func (c *Coll[TData]) createKSListQuery(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTKeySort) (string, ct.SortDirection, *string, *ct.SortDirection, *mongo.Cursor, error) { | ||||
| 	pipeline := mongo.Pipeline{} | ||||
| 	pf1 := "_id" | ||||
| 	pd1 := ct.SortASC | ||||
| 	pf2 := "_id" | ||||
| 	pd2 := ct.SortASC | ||||
|  | ||||
| 	if filter != nil { | ||||
| 		pipeline = filter.FilterQuery(ctx) | ||||
| 		pf1, pd1, pf2, pd2 = filter.Pagination(ctx) | ||||
| 	} | ||||
|  | ||||
| 	sortPrimary := pf1 | ||||
| 	sortDirPrimary := pd1 | ||||
| 	sortSecondary := &pf2 | ||||
| 	sortDirSecondary := &pd2 | ||||
|  | ||||
| 	if pf1 == pf2 { | ||||
| 		sortSecondary = nil | ||||
| 		sortDirSecondary = nil | ||||
| 	} | ||||
|  | ||||
| 	paginationPipeline, doubleSortPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | ||||
| 	if err != nil { | ||||
| 		return "", "", nil, nil, nil, exerr. | ||||
| 			Wrap(err, "failed to create pagination"). | ||||
| 			WithType(exerr.TypeCursorTokenDecode). | ||||
| 			Str("collection", c.Name()). | ||||
| 			Any("inTok", inTok). | ||||
| 			Any("sortPrimary", sortPrimary). | ||||
| 			Any("sortDirPrimary", sortDirPrimary). | ||||
| 			Any("sortSecondary", sortSecondary). | ||||
| 			Any("sortDirSecondary", sortDirSecondary). | ||||
| 			Any("pageSize", pageSize). | ||||
| 			Build() | ||||
| 	} | ||||
|  | ||||
| 	pipeline = append(pipeline, paginationPipeline...) | ||||
|  | ||||
| 	for _, ppl := range c.extraModPipeline { | ||||
| 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||
| 	} | ||||
|  | ||||
| 	if c.needsDoubleSort(ctx) { | ||||
| 		pipeline = langext.ArrConcat(pipeline, doubleSortPipeline) | ||||
| 	} | ||||
|  | ||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||
| 	if err != nil { | ||||
| 		return "", "", nil, nil, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||
| 	} | ||||
|  | ||||
| 	return sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, cursor, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) createPaginatedListQuery(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTPaginated) (int, *mongo.Cursor, error) { | ||||
| 	var err error | ||||
|  | ||||
| 	page := inTok.Page | ||||
|  | ||||
| 	pipelineSort := mongo.Pipeline{} | ||||
| 	pipelineFilter := mongo.Pipeline{} | ||||
|  | ||||
| 	if filter != nil { | ||||
| 		pipelineFilter = filter.FilterQuery(ctx) | ||||
| 		pf1, pd1, pf2, pd2 := filter.Pagination(ctx) | ||||
|  | ||||
| 		pipelineSort, err = createSortOnlyPipeline(pf1, pd1, &pf2, &pd2) | ||||
| 		if err != nil { | ||||
| 			return 0, nil, exerr.Wrap(err, "failed to create sort pipeline").Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	pipelinePaginate := mongo.Pipeline{} | ||||
| 	if pageSize != nil { | ||||
| 		pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$skip", Value: *pageSize * (page - 1)}}) | ||||
| 		pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$limit", Value: *pageSize}}) | ||||
| 	} else { | ||||
| 		page = 1 | ||||
| 	} | ||||
|  | ||||
| 	pipelineCount := mongo.Pipeline{} | ||||
| 	pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}}) | ||||
|  | ||||
| 	extrModPipelineResolved := mongo.Pipeline{} | ||||
| 	for _, ppl := range c.extraModPipeline { | ||||
| 		extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx)) | ||||
| 	} | ||||
|  | ||||
| 	pipelineList := langext.ArrConcat(pipelineFilter, pipelineSort, pipelinePaginate, extrModPipelineResolved, pipelineSort) | ||||
|  | ||||
| 	cursorList, err := c.coll.Aggregate(ctx, pipelineList) | ||||
| 	if err != nil { | ||||
| 		return 0, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build() | ||||
| 	} | ||||
|  | ||||
| 	return page, cursorList, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) listWithKSToken(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTKeySort) ([]TData, ct.CursorToken, error) { | ||||
| 	if inTok.Mode == ct.CTMEnd { | ||||
| 		return make([]TData, 0), ct.End(), nil | ||||
| 	} | ||||
|  | ||||
| 	if pageSize != nil && *pageSize == 0 { | ||||
| 		return make([]TData, 0), inTok, nil // fast track, we return an empty list and do not advance the cursor token | ||||
| 	} | ||||
|  | ||||
| 	sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, cursor, err := c.createKSListQuery(ctx, filter, pageSize, inTok) | ||||
| 	if err != nil { | ||||
| 		return nil, nil, exerr.Wrap(err, "").Build() | ||||
| 	} | ||||
|  | ||||
| 	defer func() { _ = cursor.Close(ctx) }() | ||||
|  | ||||
| 	// fast branch | ||||
| 	if pageSize == nil { | ||||
| 		entries, err := c.decodeAll(ctx, cursor) | ||||
| 		if err != nil { | ||||
| 			return nil, nil, exerr.Wrap(err, "failed to all-decode entities").Build() | ||||
| 		} | ||||
| 		return entries, ct.End(), nil | ||||
| 	} | ||||
|  | ||||
| 	entities := make([]TData, 0, cursor.RemainingBatchLength()) | ||||
| 	for (pageSize == nil || len(entities) != *pageSize) && cursor.Next(ctx) { | ||||
| 		var entry TData | ||||
| 		entry, err = c.decodeSingle(ctx, cursor) | ||||
| 		if err != nil { | ||||
| 			return nil, nil, exerr.Wrap(err, "failed to decode entity").Build() | ||||
| 		} | ||||
| 		entities = append(entities, entry) | ||||
| 	} | ||||
|  | ||||
| 	if pageSize == nil || len(entities) < *pageSize || !cursor.TryNext(ctx) { | ||||
| 		return entities, ct.End(), nil | ||||
| 	} | ||||
|  | ||||
| 	last := entities[len(entities)-1] | ||||
|  | ||||
| 	c.EnsureInitializedReflection(last) | ||||
|  | ||||
| 	nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize) | ||||
| 	if err != nil { | ||||
| 		return nil, nil, exerr.Wrap(err, "failed to create (out)-token").Build() | ||||
| 	} | ||||
|  | ||||
| 	return entities, nextToken, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) listWithPaginatedToken(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTPaginated) ([]TData, ct.CursorToken, error) { | ||||
| 	var err error | ||||
|  | ||||
| 	page := inTok.Page | ||||
|  | ||||
| 	if page < 0 { | ||||
| 		page = 1 | ||||
| 	} | ||||
|  | ||||
| 	page, cursorList, err := c.createPaginatedListQuery(ctx, filter, pageSize, inTok) | ||||
| 	if err != nil { | ||||
| 		return nil, nil, exerr.Wrap(err, "").Build() | ||||
| 	} | ||||
|  | ||||
| 	entities, err := c.decodeAll(ctx, cursorList) | ||||
| 	if err != nil { | ||||
| 		return nil, nil, exerr.Wrap(err, "failed to all-decode entities").Build() | ||||
| 	} | ||||
|  | ||||
| 	tokOut := ct.Page(page + 1) | ||||
| 	if pageSize == nil || len(entities) < *pageSize { | ||||
| 		tokOut = ct.PageEnd() | ||||
| 	} | ||||
|  | ||||
| 	return entities, tokOut, nil | ||||
| } | ||||
|  | ||||
| func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CTKeySort, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) { | ||||
|  | ||||
| 	cond := bson.A{} | ||||
| 	sort := bson.D{} | ||||
| @@ -215,7 +421,7 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken | ||||
|  | ||||
| 			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer) | ||||
| 			cond = append(cond, bson.M{"$and": bson.A{ | ||||
| 				bson.M{fieldPrimary: valuePrimary}, | ||||
| 				bson.M{"$or": bson.A{bson.M{fieldPrimary: valuePrimary}, bson.M{fieldPrimary: nil}, bson.M{fieldPrimary: bson.M{"$exists": false}}}}, | ||||
| 				bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}}, | ||||
| 			}}) | ||||
|  | ||||
| @@ -225,7 +431,7 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken | ||||
|  | ||||
| 			// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older) | ||||
| 			cond = append(cond, bson.M{"$and": bson.A{ | ||||
| 				bson.M{fieldPrimary: valuePrimary}, | ||||
| 				bson.M{"$or": bson.A{bson.M{fieldPrimary: valuePrimary}, bson.M{fieldPrimary: nil}, bson.M{fieldPrimary: bson.M{"$exists": false}}}}, | ||||
| 				bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}}, | ||||
| 			}}) | ||||
|  | ||||
| @@ -265,3 +471,33 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken | ||||
|  | ||||
| 	return pipeline, pipelineSort, nil | ||||
| } | ||||
|  | ||||
| func createSortOnlyPipeline(fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection) ([]bson.D, error) { | ||||
|  | ||||
| 	sort := bson.D{} | ||||
|  | ||||
| 	if sortPrimary == ct.SortASC { | ||||
| 		// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary | ||||
| 		sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) | ||||
| 	} else if sortPrimary == ct.SortDESC { | ||||
| 		// We sort DESC on <field> - so we want all entries older ($lt) than the $primary | ||||
| 		sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) | ||||
| 	} | ||||
|  | ||||
| 	if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary { | ||||
|  | ||||
| 		if *sortSecondary == ct.SortASC { | ||||
|  | ||||
| 			sort = append(sort, bson.E{Key: *fieldSecondary, Value: +1}) | ||||
|  | ||||
| 		} else if *sortSecondary == ct.SortDESC { | ||||
|  | ||||
| 			sort = append(sort, bson.E{Key: *fieldSecondary, Value: -1}) | ||||
|  | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	pipelineSort := mongo.Pipeline{bson.D{{Key: "$sort", Value: sort}}} | ||||
|  | ||||
| 	return pipelineSort, nil | ||||
| } | ||||
|   | ||||
| @@ -7,13 +7,111 @@ import ( | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | ||||
| 	"iter" | ||||
| ) | ||||
|  | ||||
| func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.MongoFilter, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||
| 	page, cursorList, pipelineTotalCount, err := c.createPaginatedQuery(ctx, filter, page, limit) | ||||
| 	if err != nil { | ||||
| 		return nil, pag.Pagination{}, exerr.Wrap(err, "").Build() | ||||
| 	} | ||||
|  | ||||
| 	type totalCountResult struct { | ||||
| 		Count int `bson:"count"` | ||||
| 	} | ||||
|  | ||||
| 	entities, err := c.decodeAll(ctx, cursorList) | ||||
| 	if err != nil { | ||||
| 		return nil, pag.Pagination{}, exerr.Wrap(err, "failed to all-decode entities").Build() | ||||
| 	} | ||||
|  | ||||
| 	var tcRes totalCountResult | ||||
|  | ||||
| 	if limit == nil { | ||||
| 		// optimization, limit==nil, so we query all entities anyway, just use the array length | ||||
| 		tcRes.Count = len(entities) | ||||
| 	} else { | ||||
|  | ||||
| 		cursorTotalCount, err := c.coll.Aggregate(ctx, pipelineTotalCount) | ||||
| 		if err != nil { | ||||
| 			return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build() | ||||
| 		} | ||||
|  | ||||
| 		if cursorTotalCount.Next(ctx) { | ||||
| 			err = cursorTotalCount.Decode(&tcRes) | ||||
| 			if err != nil { | ||||
| 				return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode mongo-aggregation $count result").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build() | ||||
| 			} | ||||
| 		} else { | ||||
| 			tcRes.Count = 0 // no entries in DB | ||||
| 		} | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	paginationObj := pag.Pagination{ | ||||
| 		Page:             page, | ||||
| 		Limit:            langext.Coalesce(limit, tcRes.Count), | ||||
| 		TotalPages:       pag.CalcPaginationTotalPages(tcRes.Count, langext.Coalesce(limit, tcRes.Count)), | ||||
| 		TotalItems:       tcRes.Count, | ||||
| 		CurrentPageCount: len(entities), | ||||
| 	} | ||||
|  | ||||
| 	return entities, paginationObj, nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) PaginateIterateFunc(ctx context.Context, filter pag.MongoFilter, page int, limit *int, fn func(v TData) error) error { | ||||
| 	page, cursor, _, err := c.createPaginatedQuery(ctx, filter, page, limit) | ||||
| 	if err != nil { | ||||
| 		return exerr.Wrap(err, "").Build() | ||||
| 	} | ||||
|  | ||||
| 	defer func() { _ = cursor.Close(ctx) }() | ||||
|  | ||||
| 	for cursor.Next(ctx) { | ||||
|  | ||||
| 		v, err := c.decodeSingle(ctx, cursor) | ||||
| 		if err != nil { | ||||
| 			return exerr.Wrap(err, "").Build() | ||||
| 		} | ||||
|  | ||||
| 		err = fn(v) | ||||
| 		if err != nil { | ||||
| 			return exerr.Wrap(err, "").Build() | ||||
| 		} | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func (c *Coll[TData]) PaginateIterate(ctx context.Context, filter pag.MongoFilter, page int, limit *int) iter.Seq2[TData, error] { | ||||
| 	page, cursor, _, err := c.createPaginatedQuery(ctx, filter, page, limit) | ||||
| 	if err != nil { | ||||
| 		return langext.IterSingleValueSeq2[TData, error](*new(TData), exerr.Wrap(err, "").Build()) | ||||
| 	} | ||||
|  | ||||
| 	return func(yield func(TData, error) bool) { | ||||
| 		defer func() { _ = cursor.Close(ctx) }() | ||||
|  | ||||
| 		for cursor.Next(ctx) { | ||||
| 			v, err := c.decodeSingle(ctx, cursor) | ||||
| 			if err != nil { | ||||
| 				if !yield(*new(TData), err) { | ||||
| 					return | ||||
| 				} | ||||
| 				continue | ||||
| 			} | ||||
|  | ||||
| 			if !yield(v, nil) { | ||||
| 				return | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // ===================================================================================================================== | ||||
|  | ||||
| func (c *Coll[TData]) createPaginatedQuery(ctx context.Context, filter pag.MongoFilter, page int, limit *int) (int, *mongo.Cursor, mongo.Pipeline, error) { | ||||
| 	if page < 0 { | ||||
| 		page = 1 | ||||
| 	} | ||||
| @@ -52,36 +150,8 @@ func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.MongoFilter, page | ||||
|  | ||||
| 	cursorList, err := c.coll.Aggregate(ctx, pipelineList) | ||||
| 	if err != nil { | ||||
| 		return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build() | ||||
| 		return 0, nil, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build() | ||||
| 	} | ||||
|  | ||||
| 	entities, err := c.decodeAll(ctx, cursorList) | ||||
| 	if err != nil { | ||||
| 		return nil, pag.Pagination{}, exerr.Wrap(err, "failed to all-decode entities").Build() | ||||
| 	} | ||||
|  | ||||
| 	cursorTotalCount, err := c.coll.Aggregate(ctx, pipelineTotalCount) | ||||
| 	if err != nil { | ||||
| 		return nil, pag.Pagination{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build() | ||||
| 	} | ||||
|  | ||||
| 	var tcRes totalCountResult | ||||
| 	if cursorTotalCount.Next(ctx) { | ||||
| 		err = cursorTotalCount.Decode(&tcRes) | ||||
| 		if err != nil { | ||||
| 			return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode mongo-aggregation $count result").Any("pipeline", pipelineTotalCount).Str("collection", c.Name()).Build() | ||||
| 		} | ||||
| 	} else { | ||||
| 		tcRes.Count = 0 // no entries in DB | ||||
| 	} | ||||
|  | ||||
| 	paginationObj := pag.Pagination{ | ||||
| 		Page:             page, | ||||
| 		Limit:            langext.Coalesce(limit, tcRes.Count), | ||||
| 		TotalPages:       pag.CalcPaginationTotalPages(tcRes.Count, langext.Coalesce(limit, tcRes.Count)), | ||||
| 		TotalItems:       tcRes.Count, | ||||
| 		CurrentPageCount: len(entities), | ||||
| 	} | ||||
|  | ||||
| 	return entities, paginationObj, nil | ||||
| 	return page, cursorList, pipelineTotalCount, nil | ||||
| } | ||||
|   | ||||
							
								
								
									
										1
									
								
								wpdf/.gitignore
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										1
									
								
								wpdf/.gitignore
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1 @@ | ||||
| wpdf_test.pdf | ||||
							
								
								
									
										
											BIN
										
									
								
								wpdf/logo.png
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								wpdf/logo.png
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 11 KiB | 
							
								
								
									
										85
									
								
								wpdf/wpdf.go
									
									
									
									
									
								
							
							
						
						
									
										85
									
								
								wpdf/wpdf.go
									
									
									
									
									
								
							| @@ -3,6 +3,7 @@ package wpdf | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"github.com/jung-kurt/gofpdf" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| ) | ||||
|  | ||||
| type WPDFBuilder struct { | ||||
| @@ -13,6 +14,7 @@ type WPDFBuilder struct { | ||||
| 	fontName    PDFFontFamily | ||||
| 	fontStyle   PDFFontStyle | ||||
| 	fontSize    float64 | ||||
| 	debug       bool | ||||
| } | ||||
|  | ||||
| type PDFMargins struct { | ||||
| @@ -61,6 +63,19 @@ func (b *WPDFBuilder) SetMargins(v PDFMargins) { | ||||
|  | ||||
// AddPage starts a new PDF page.
//
// In debug mode the page geometry is additionally visualized: the printable
// work area is outlined in dark blue and the four margin strips (top, left,
// right, bottom) are shaded in translucent blue.
func (b *WPDFBuilder) AddPage() {
	b.b.AddPage()

	if b.debug {

		ml, mt, mr, mb := b.GetMargins()
		pw, ph := b.GetPageSize()

		// outline of the work area (page minus margins)
		b.Rect(pw-ml-mr, ph-mt-mb, RectOutline, NewPDFRectOpt().X(ml).Y(mt).LineWidth(0.25).DrawColor(0, 0, 128))

		// translucent fills over the top, left, right and bottom margin strips
		b.Rect(pw, mt, RectFill, NewPDFRectOpt().X(0).Y(0).FillColor(0, 0, 255).Alpha(0.2, BlendNormal))
		b.Rect(ml, ph-mt-mb, RectFill, NewPDFRectOpt().X(0).Y(mt).FillColor(0, 0, 255).Alpha(0.2, BlendNormal))
		b.Rect(mr, ph-mt-mb, RectFill, NewPDFRectOpt().X(pw-mr).Y(mt).FillColor(0, 0, 255).Alpha(0.2, BlendNormal))
		b.Rect(pw, mb, RectFill, NewPDFRectOpt().X(0).Y(ph-mb).FillColor(0, 0, 255).Alpha(0.2, BlendNormal))
	}
}
|  | ||||
| func (b *WPDFBuilder) SetTextColor(cr, cg, cb int) { | ||||
| @@ -105,12 +120,38 @@ func (b *WPDFBuilder) SetFont(fontName PDFFontFamily, fontStyle PDFFontStyle, fo | ||||
| 	b.cellHeight = b.b.PointConvert(fontSize) | ||||
| } | ||||
|  | ||||
// GetFontSize returns the currently configured font size.
func (b *WPDFBuilder) GetFontSize() float64 {
	return b.fontSize
}

// GetFontFamily returns the current font *style*, despite its name.
// NOTE(review): this looks like a copy-paste bug - a "family" getter would be
// expected to return b.fontName (PDFFontFamily). Fixing it would change the
// return type (breaking callers), so it is only flagged here; confirm the
// intended behavior against call sites.
func (b *WPDFBuilder) GetFontFamily() PDFFontStyle {
	return b.fontStyle
}

// GetFontStyle returns the current font *size*, despite its name.
// NOTE(review): likely a copy-paste bug - a "style" getter would be expected
// to return b.fontStyle (PDFFontStyle). Fixing it would change the return
// type (breaking callers), so it is only flagged here; confirm with callers.
func (b *WPDFBuilder) GetFontStyle() float64 {
	return b.fontSize
}
|  | ||||
// SetCellSpacing sets the additional vertical spacing added below each cell.
func (b *WPDFBuilder) SetCellSpacing(h float64) {
	b.cellSpacing = h
}
|  | ||||
// Ln performs a line break of height h.
//
// In debug mode the skipped region is visualized: an outline and a faint fill
// covering the area from the pre-break position to the right margin, plus a
// diagonal line from the position before the break to the position after it.
func (b *WPDFBuilder) Ln(h float64) {
	xBefore, yBefore := b.GetXY()

	b.b.Ln(h)

	yAfter := b.GetY()

	if b.debug {

		_, _, mr, _ := b.GetMargins()
		pw, _ := b.GetPageSize()

		b.Rect(pw-mr-xBefore, yAfter-yBefore, RectOutline, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).DrawColor(128, 128, 0).Alpha(0.5, BlendNormal))
		b.Rect(pw-mr-xBefore, yAfter-yBefore, RectFill, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).FillColor(128, 128, 0).Alpha(0.1, BlendNormal))
		b.Line(xBefore, yBefore, pw-mr, yAfter, NewPDFLineOpt().LineWidth(0.25).DrawColor(128, 128, 0))
	}
}
|  | ||||
| func (b *WPDFBuilder) Build() ([]byte, error) { | ||||
| @@ -192,6 +233,48 @@ func (b *WPDFBuilder) GetWorkAreaWidth() float64 { | ||||
| 	return b.GetPageWidth() - b.GetMarginLeft() - b.GetMarginRight() | ||||
| } | ||||
|  | ||||
| func (b *WPDFBuilder) GetStringWidth(str string) float64 { | ||||
// SetAutoPageBreak enables/disables automatic page breaks with the given
// bottom margin (delegates to the underlying gofpdf builder).
func (b *WPDFBuilder) SetAutoPageBreak(auto bool, margin float64) {
	b.b.SetAutoPageBreak(auto, margin)
}

// SetFooterFunc registers a callback invoked to render each page footer.
func (b *WPDFBuilder) SetFooterFunc(fnc func()) {
	b.b.SetFooterFunc(fnc)
}

// PageNo returns the current page number.
func (b *WPDFBuilder) PageNo() int {
	return b.b.PageNo()
}

// Bookmark adds an outline bookmark at vertical position y with the given
// nesting level; the text is passed through the builder's text translator.
func (b *WPDFBuilder) Bookmark(txtStr string, level int, y float64) {
	b.b.Bookmark(b.tr(txtStr), level, y)
}
|  | ||||
| func (b *WPDFBuilder) GetStringWidth(str string, opts ...PDFCellOpt) float64 { | ||||
|  | ||||
| 	var fontNameOverride *PDFFontFamily | ||||
| 	var fontStyleOverride *PDFFontStyle | ||||
| 	var fontSizeOverride *float64 | ||||
|  | ||||
| 	for _, opt := range opts { | ||||
| 		fontNameOverride = langext.CoalesceOpt(opt.fontNameOverride, fontNameOverride) | ||||
| 		fontStyleOverride = langext.CoalesceOpt(opt.fontStyleOverride, fontStyleOverride) | ||||
| 		fontSizeOverride = langext.CoalesceOpt(opt.fontSizeOverride, fontSizeOverride) | ||||
| 	} | ||||
|  | ||||
| 	if fontNameOverride != nil || fontStyleOverride != nil || fontSizeOverride != nil { | ||||
| 		oldFontName := b.fontName | ||||
| 		oldFontStyle := b.fontStyle | ||||
| 		oldFontSize := b.fontSize | ||||
| 		newFontName := langext.Coalesce(fontNameOverride, oldFontName) | ||||
| 		newFontStyle := langext.Coalesce(fontStyleOverride, oldFontStyle) | ||||
| 		newFontSize := langext.Coalesce(fontSizeOverride, oldFontSize) | ||||
| 		b.SetFont(newFontName, newFontStyle, newFontSize) | ||||
| 		defer func() { b.SetFont(oldFontName, oldFontStyle, oldFontSize) }() | ||||
| 	} | ||||
|  | ||||
| 	return b.b.GetStringWidth(str) | ||||
| } | ||||
|  | ||||
| func (b *WPDFBuilder) Debug(v bool) { | ||||
| 	b.debug = v | ||||
| } | ||||
|   | ||||
| @@ -1,6 +1,9 @@ | ||||
| package wpdf | ||||
|  | ||||
| import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| import ( | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/dataext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| ) | ||||
|  | ||||
| type PDFCellOpt struct { | ||||
| 	width             *float64 | ||||
| @@ -14,6 +17,7 @@ type PDFCellOpt struct { | ||||
| 	fontNameOverride  *PDFFontFamily | ||||
| 	fontStyleOverride *PDFFontStyle | ||||
| 	fontSizeOverride  *float64 | ||||
| 	alphaOverride     *dataext.Tuple[float64, PDFBlendMode] | ||||
| 	extraLn           *float64 | ||||
| 	x                 *float64 | ||||
| 	autoWidth         *bool | ||||
| @@ -21,6 +25,7 @@ type PDFCellOpt struct { | ||||
| 	borderColor       *PDFColor | ||||
| 	fillColor         *PDFColor | ||||
| 	autoWidthPaddingX *float64 | ||||
| 	debug             *bool | ||||
| } | ||||
|  | ||||
| func NewPDFCellOpt() *PDFCellOpt { | ||||
| @@ -149,12 +154,45 @@ func (opt *PDFCellOpt) FillColorHex(c uint32) *PDFCellOpt { | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
// Alpha sets a transparency override (alpha value plus blend mode) for the cell.
func (opt *PDFCellOpt) Alpha(alpha float64, blendMode PDFBlendMode) *PDFCellOpt {
	opt.alphaOverride = &dataext.Tuple[float64, PDFBlendMode]{V1: alpha, V2: blendMode}
	return opt
}

// Debug overrides the builder-wide debug flag for this cell only.
func (opt *PDFCellOpt) Debug(v bool) *PDFCellOpt {
	opt.debug = &v
	return opt
}

// Copy returns a shallow copy of the option set (pointer fields are shared).
func (opt *PDFCellOpt) Copy() *PDFCellOpt {
	c := *opt
	return &c
}

// ToMulti converts the single-cell options into the multi-cell option type.
// NOTE(review): options without a listed counterpart (at least alphaOverride,
// autoWidth, autoWidthPaddingX and debug) are silently dropped by this
// conversion - confirm that is intended.
func (opt *PDFCellOpt) ToMulti() *PDFMultiCellOpt {
	return &PDFMultiCellOpt{
		width:             opt.width,
		height:            opt.height,
		border:            opt.border,
		align:             opt.align,
		fill:              opt.fill,
		fontNameOverride:  opt.fontNameOverride,
		fontStyleOverride: opt.fontStyleOverride,
		fontSizeOverride:  opt.fontSizeOverride,
		extraLn:           opt.extraLn,
		x:                 opt.x,
		textColor:         opt.textColor,
		borderColor:       opt.borderColor,
		fillColor:         opt.fillColor,
	}
}
|  | ||||
| func (b *WPDFBuilder) Cell(txt string, opts ...*PDFCellOpt) { | ||||
|  | ||||
| 	txtTR := b.tr(txt) | ||||
|  | ||||
| 	width := float64(0) | ||||
| 	height := b.cellHeight + b.cellSpacing | ||||
| 	var height *float64 = nil | ||||
| 	border := BorderNone | ||||
| 	ln := BreakToNextLine | ||||
| 	align := AlignLeft | ||||
| @@ -164,6 +202,7 @@ func (b *WPDFBuilder) Cell(txt string, opts ...*PDFCellOpt) { | ||||
| 	var fontNameOverride *PDFFontFamily | ||||
| 	var fontStyleOverride *PDFFontStyle | ||||
| 	var fontSizeOverride *float64 | ||||
| 	var alphaOverride *dataext.Tuple[float64, PDFBlendMode] | ||||
| 	extraLn := float64(0) | ||||
| 	var x *float64 | ||||
| 	autoWidth := false | ||||
| @@ -171,10 +210,11 @@ func (b *WPDFBuilder) Cell(txt string, opts ...*PDFCellOpt) { | ||||
| 	var borderColor *PDFColor | ||||
| 	var fillColor *PDFColor | ||||
| 	autoWidthPaddingX := float64(0) | ||||
| 	debug := b.debug | ||||
|  | ||||
| 	for _, opt := range opts { | ||||
| 		width = langext.Coalesce(opt.width, width) | ||||
| 		height = langext.Coalesce(opt.height, height) | ||||
| 		height = langext.CoalesceOpt(opt.height, height) | ||||
| 		border = langext.Coalesce(opt.border, border) | ||||
| 		ln = langext.Coalesce(opt.ln, ln) | ||||
| 		align = langext.Coalesce(opt.align, align) | ||||
| @@ -184,6 +224,7 @@ func (b *WPDFBuilder) Cell(txt string, opts ...*PDFCellOpt) { | ||||
| 		fontNameOverride = langext.CoalesceOpt(opt.fontNameOverride, fontNameOverride) | ||||
| 		fontStyleOverride = langext.CoalesceOpt(opt.fontStyleOverride, fontStyleOverride) | ||||
| 		fontSizeOverride = langext.CoalesceOpt(opt.fontSizeOverride, fontSizeOverride) | ||||
| 		alphaOverride = langext.CoalesceOpt(opt.alphaOverride, alphaOverride) | ||||
| 		extraLn = langext.Coalesce(opt.extraLn, extraLn) | ||||
| 		x = langext.CoalesceOpt(opt.x, x) | ||||
| 		autoWidth = langext.Coalesce(opt.autoWidth, autoWidth) | ||||
| @@ -191,6 +232,7 @@ func (b *WPDFBuilder) Cell(txt string, opts ...*PDFCellOpt) { | ||||
| 		borderColor = langext.CoalesceOpt(opt.borderColor, borderColor) | ||||
| 		fillColor = langext.CoalesceOpt(opt.fillColor, fillColor) | ||||
| 		autoWidthPaddingX = langext.Coalesce(opt.autoWidthPaddingX, autoWidthPaddingX) | ||||
| 		debug = langext.Coalesce(opt.debug, debug) | ||||
| 	} | ||||
|  | ||||
| 	if fontNameOverride != nil || fontStyleOverride != nil || fontSizeOverride != nil { | ||||
| @@ -204,6 +246,11 @@ func (b *WPDFBuilder) Cell(txt string, opts ...*PDFCellOpt) { | ||||
| 		defer func() { b.SetFont(oldFontName, oldFontStyle, oldFontSize) }() | ||||
| 	} | ||||
|  | ||||
| 	if height == nil { | ||||
| 		// (do after SetFont, so that b.cellHeight is correctly set to fontOverride) | ||||
| 		height = langext.Ptr(b.cellHeight + b.cellSpacing) | ||||
| 	} | ||||
|  | ||||
| 	if textColor != nil { | ||||
| 		oldColorR, oldColorG, oldColorB := b.b.GetTextColor() | ||||
| 		b.SetTextColor(textColor.R, textColor.G, textColor.B) | ||||
| @@ -222,15 +269,33 @@ func (b *WPDFBuilder) Cell(txt string, opts ...*PDFCellOpt) { | ||||
| 		defer func() { b.SetFillColor(oldColorR, oldColorG, oldColorB) }() | ||||
| 	} | ||||
|  | ||||
| 	if alphaOverride != nil { | ||||
| 		oldA, oldBMS := b.b.GetAlpha() | ||||
| 		b.b.SetAlpha(alphaOverride.V1, string(alphaOverride.V2)) | ||||
| 		defer func() { b.b.SetAlpha(oldA, oldBMS) }() | ||||
| 	} | ||||
|  | ||||
| 	if x != nil { | ||||
| 		b.b.SetX(*x) | ||||
| 	} | ||||
|  | ||||
| 	if autoWidth { | ||||
| 		width = b.b.GetStringWidth(txtTR) + autoWidthPaddingX | ||||
| 		width = b.GetStringWidth(txtTR, langext.ArrDeRef(opts)...) + autoWidthPaddingX | ||||
| 	} | ||||
|  | ||||
| 	b.b.CellFormat(width, height, txtTR, string(border), int(ln), string(align), fill, link, linkStr) | ||||
| 	xBefore, yBefore := b.b.GetXY() | ||||
|  | ||||
| 	b.b.CellFormat(width, *height, txtTR, string(border), int(ln), string(align), fill, link, linkStr) | ||||
|  | ||||
| 	if debug { | ||||
| 		if ln == BreakToNextLine { | ||||
| 			b.Rect(b.GetPageWidth()-xBefore-b.GetMarginRight(), *height, RectOutline, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).DrawColor(0, 128, 0)) | ||||
| 		} else if ln == BreakToRight { | ||||
| 			b.Rect(b.GetX()-xBefore, *height, RectOutline, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).DrawColor(0, 128, 0)) | ||||
| 		} else if ln == BreakToBelow { | ||||
| 			b.Rect(b.GetPageWidth()-xBefore-b.GetMarginRight(), *height, RectOutline, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).DrawColor(0, 128, 0)) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if extraLn != 0 { | ||||
| 		b.b.Ln(extraLn) | ||||
|   | ||||
| @@ -74,6 +74,35 @@ const ( | ||||
| 	RectFillOutline PDFRectStyle = "FD" | ||||
| ) | ||||
|  | ||||
// PDFBlendMode names a PDF transparency blend mode; its string value is
// passed verbatim to gofpdf's SetAlpha as the blend-mode string.
type PDFBlendMode string

// Supported blend modes (names as defined by the PDF specification).
const (
	BlendNormal     PDFBlendMode = "Normal"
	BlendMultiply   PDFBlendMode = "Multiply"
	BlendScreen     PDFBlendMode = "Screen"
	BlendOverlay    PDFBlendMode = "Overlay"
	BlendDarken     PDFBlendMode = "Darken"
	BlendLighten    PDFBlendMode = "Lighten"
	BlendColorDodge PDFBlendMode = "ColorDodge"
	BlendColorBurn  PDFBlendMode = "ColorBurn"
	BlendHardLight  PDFBlendMode = "HardLight"
	BlendSoftLight  PDFBlendMode = "SoftLight"
	BlendDifference PDFBlendMode = "Difference"
	BlendExclusion  PDFBlendMode = "Exclusion"
	BlendHue        PDFBlendMode = "Hue"
	BlendSaturation PDFBlendMode = "Saturation"
	BlendColor      PDFBlendMode = "Color"
	BlendLuminosity PDFBlendMode = "Luminosity"
)
|  | ||||
// PDFLineCapStyle names a line cap style; its string value is passed
// verbatim to gofpdf's SetLineCapStyle.
type PDFLineCapStyle string

const (
	CapButt   PDFLineCapStyle = "butt"
	CapRound  PDFLineCapStyle = "round"
	CapSquare PDFLineCapStyle = "square"
)
|  | ||||
| const ( | ||||
| 	BackgroundFill        = true | ||||
| 	BackgroundTransparent = false | ||||
|   | ||||
| @@ -3,10 +3,12 @@ package wpdf | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"github.com/jung-kurt/gofpdf" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/dataext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/imageext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"image" | ||||
| 	"image/color" | ||||
| 	"image/draw" | ||||
| 	"net/http" | ||||
| ) | ||||
|  | ||||
| @@ -64,7 +66,12 @@ func (b *WPDFBuilder) RegisterImage(bin []byte, opts ...*PDFImageRegisterOpt) *P | ||||
| 	} | ||||
|  | ||||
| 	if imageType == "" { | ||||
| 		ct := http.DetectContentType(bin[:512]) | ||||
| 		ct := "" | ||||
| 		if len(bin) > 512 { | ||||
| 			ct = http.DetectContentType(bin[:512]) | ||||
| 		} else { | ||||
| 			ct = http.DetectContentType(bin) | ||||
| 		} | ||||
| 		switch ct { | ||||
| 		case "image/jpg": | ||||
| 			imageType = "JPG" | ||||
| @@ -124,6 +131,8 @@ type PDFImageOpt struct { | ||||
| 	compression           *imageext.ImageCompresson | ||||
| 	reEncodePixelPerMM    *float64 | ||||
| 	crop                  *imageext.ImageCrop | ||||
| 	alphaOverride         *dataext.Tuple[float64, PDFBlendMode] | ||||
| 	debug                 *bool | ||||
| } | ||||
|  | ||||
| func NewPDFImageOpt() *PDFImageOpt { | ||||
| @@ -150,6 +159,11 @@ func (opt *PDFImageOpt) Height(v float64) *PDFImageOpt { | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFImageOpt) Debug(v bool) *PDFImageOpt { | ||||
| 	opt.debug = &v | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFImageOpt) Flow(v bool) *PDFImageOpt { | ||||
| 	opt.flow = &v | ||||
| 	return opt | ||||
| @@ -211,6 +225,11 @@ func (opt *PDFImageOpt) Crop(cropX float64, cropY float64, cropWidth float64, cr | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFImageOpt) Alpha(alpha float64, blendMode PDFBlendMode) *PDFImageOpt { | ||||
| 	opt.alphaOverride = &dataext.Tuple[float64, PDFBlendMode]{V1: alpha, V2: blendMode} | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (b *WPDFBuilder) Image(img *PDFImageRef, opts ...*PDFImageOpt) { | ||||
| 	var err error | ||||
|  | ||||
| @@ -228,7 +247,9 @@ func (b *WPDFBuilder) Image(img *PDFImageRef, opts ...*PDFImageOpt) { | ||||
| 	var imageFit *imageext.ImageFit = nil | ||||
| 	var fillColor color.Color = color.Transparent | ||||
| 	compression := imageext.CompressionPNGSpeed | ||||
| 	debug := b.debug | ||||
| 	var crop *imageext.ImageCrop = nil | ||||
| 	var alphaOverride *dataext.Tuple[float64, PDFBlendMode] | ||||
|  | ||||
| 	for _, opt := range opts { | ||||
| 		x = langext.Coalesce(opt.x, x) | ||||
| @@ -246,10 +267,18 @@ func (b *WPDFBuilder) Image(img *PDFImageRef, opts ...*PDFImageOpt) { | ||||
| 		compression = langext.Coalesce(opt.compression, compression) | ||||
| 		reEncodePixelPerMM = langext.Coalesce(opt.reEncodePixelPerMM, reEncodePixelPerMM) | ||||
| 		crop = langext.CoalesceOpt(opt.crop, crop) | ||||
| 		debug = langext.Coalesce(opt.debug, debug) | ||||
| 		alphaOverride = langext.CoalesceOpt(opt.alphaOverride, alphaOverride) | ||||
| 	} | ||||
|  | ||||
| 	if flow { | ||||
| 		y = b.GetY() | ||||
| 	} | ||||
|  | ||||
| 	regName := img.Name | ||||
|  | ||||
| 	var subImageBounds *imageext.PercentageRectangle = nil | ||||
|  | ||||
| 	if imageFit != nil || fillColor != nil || crop != nil { | ||||
|  | ||||
| 		var dataimg image.Image | ||||
| @@ -277,11 +306,21 @@ func (b *WPDFBuilder) Image(img *PDFImageRef, opts ...*PDFImageOpt) { | ||||
| 			pxw := w * pdfPixelPerMillimeter | ||||
| 			pxh := h * pdfPixelPerMillimeter | ||||
|  | ||||
| 			dataimg, err = imageext.ObjectFitImage(dataimg, pxw, pxh, *imageFit, fillColor) | ||||
| 			var dataImgRect imageext.PercentageRectangle | ||||
| 			dataimg, dataImgRect, err = imageext.ObjectFitImage(dataimg, pxw, pxh, *imageFit, fillColor) | ||||
| 			if err != nil { | ||||
| 				b.b.SetError(err) | ||||
| 				return | ||||
| 			} | ||||
|  | ||||
| 			subImageBounds = &dataImgRect | ||||
| 		} | ||||
|  | ||||
| 		if dataimg.ColorModel() != color.RGBAModel && dataimg.ColorModel() != color.NRGBAModel { | ||||
| 			// the image cannot be 16bpp or similar - otherwise fpdf errors out | ||||
| 			dataImgRGBA := image.NewNRGBA(image.Rect(0, 0, dataimg.Bounds().Dx(), dataimg.Bounds().Dy())) | ||||
| 			draw.Draw(dataImgRGBA, dataImgRGBA.Bounds(), dataimg, dataimg.Bounds().Min, draw.Src) | ||||
| 			dataimg = dataImgRGBA | ||||
| 		} | ||||
|  | ||||
| 		bfr, imgMime, err := imageext.EncodeImage(dataimg, compression) | ||||
| @@ -305,6 +344,12 @@ func (b *WPDFBuilder) Image(img *PDFImageRef, opts ...*PDFImageOpt) { | ||||
|  | ||||
| 	} | ||||
|  | ||||
| 	if alphaOverride != nil { | ||||
| 		oldA, oldBMS := b.b.GetAlpha() | ||||
| 		b.b.SetAlpha(alphaOverride.V1, string(alphaOverride.V2)) | ||||
| 		defer func() { b.b.SetAlpha(oldA, oldBMS) }() | ||||
| 	} | ||||
|  | ||||
| 	fpdfOpt := gofpdf.ImageOptions{ | ||||
| 		ImageType:             imageType, | ||||
| 		ReadDpi:               readDpi, | ||||
| @@ -312,4 +357,16 @@ func (b *WPDFBuilder) Image(img *PDFImageRef, opts ...*PDFImageOpt) { | ||||
| 	} | ||||
|  | ||||
| 	b.b.ImageOptions(regName, x, y, w, h, flow, fpdfOpt, link, linkStr) | ||||
|  | ||||
| 	if debug { | ||||
| 		b.Rect(w, h, RectOutline, NewPDFRectOpt().X(x).Y(y).LineWidth(0.25).DrawColor(255, 0, 0)) | ||||
|  | ||||
| 		if subImageBounds != nil { | ||||
| 			r := subImageBounds.Of(imageext.Rectangle{X: x, Y: y, W: w, H: h}) | ||||
| 			b.Rect(r.W, r.H, RectOutline, NewPDFRectOpt().X(r.X).Y(r.Y).LineWidth(0.25).DrawColor(255, 0, 0)) | ||||
| 			b.Rect(r.W, r.H, RectFill, NewPDFRectOpt().X(r.X).Y(r.Y).FillColor(255, 0, 0).Alpha(0.2, BlendNormal)) | ||||
| 			b.Line(r.X, r.Y, r.X+r.W, r.Y+r.H, NewPDFLineOpt().LineWidth(0.25).DrawColor(255, 0, 0)) | ||||
| 			b.Line(r.X+r.W, r.Y, r.X, r.Y+r.H, NewPDFLineOpt().LineWidth(0.25).DrawColor(255, 0, 0)) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|   | ||||
							
								
								
									
										96
									
								
								wpdf/wpdfLine.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										96
									
								
								wpdf/wpdfLine.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,96 @@ | ||||
| package wpdf | ||||
|  | ||||
| import ( | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/dataext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| ) | ||||
|  | ||||
// PDFLineOpt is a builder-style option set for WPDFBuilder.Line.
// Nil fields mean "keep the builder's current/default value".
type PDFLineOpt struct {
	lineWidth *float64                              // stroke width override (restored after drawing)
	drawColor *PDFColor                             // stroke color override (restored after drawing)
	alpha     *dataext.Tuple[float64, PDFBlendMode] // transparency (alpha, blend mode) applied while drawing
	capStyle  *PDFLineCapStyle                      // line cap style; default CapButt (NOT restored afterwards)
	debug     *bool                                 // NOTE(review): collected in Line() but currently unused — confirm intent
}
|  | ||||
| func NewPDFLineOpt() *PDFLineOpt { | ||||
| 	return &PDFLineOpt{} | ||||
| } | ||||
|  | ||||
| func (opt *PDFLineOpt) LineWidth(v float64) *PDFLineOpt { | ||||
| 	opt.lineWidth = &v | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFLineOpt) DrawColor(cr, cg, cb int) *PDFLineOpt { | ||||
| 	opt.drawColor = langext.Ptr(rgbToColor(cr, cg, cb)) | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFLineOpt) DrawColorHex(c uint32) *PDFLineOpt { | ||||
| 	opt.drawColor = langext.Ptr(hexToColor(c)) | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFLineOpt) Alpha(alpha float64, blendMode PDFBlendMode) *PDFLineOpt { | ||||
| 	opt.alpha = &dataext.Tuple[float64, PDFBlendMode]{V1: alpha, V2: blendMode} | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFLineOpt) CapButt() *PDFLineOpt { | ||||
| 	opt.capStyle = langext.Ptr(CapButt) | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFLineOpt) CapSquare() *PDFLineOpt { | ||||
| 	opt.capStyle = langext.Ptr(CapSquare) | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFLineOpt) CapRound() *PDFLineOpt { | ||||
| 	opt.capStyle = langext.Ptr(CapRound) | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFLineOpt) Debug(v bool) *PDFLineOpt { | ||||
| 	opt.debug = &v | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
// Line draws a straight line from (x1,y1) to (x2,y2).
// Options can temporarily override line width, draw color and alpha/blend
// mode — those are restored via defer after drawing. The cap style is set
// unconditionally and is NOT restored afterwards (no getter is used here
// to capture the previous value).
func (b *WPDFBuilder) Line(x1 float64, y1 float64, x2 float64, y2 float64, opts ...*PDFLineOpt) {
	var lineWidth *float64
	var drawColor *PDFColor
	var alphaOverride *dataext.Tuple[float64, PDFBlendMode]
	capStyle := CapButt
	debug := b.debug

	// merge options; later options win over earlier ones
	for _, opt := range opts {
		lineWidth = langext.CoalesceOpt(opt.lineWidth, lineWidth)
		drawColor = langext.CoalesceOpt(opt.drawColor, drawColor)
		alphaOverride = langext.CoalesceOpt(opt.alpha, alphaOverride)
		capStyle = langext.Coalesce(opt.capStyle, capStyle)
		// NOTE(review): debug is merged but never acted upon below — confirm
		// whether debug rendering for lines is still TODO
		debug = langext.Coalesce(opt.debug, debug)
	}

	if lineWidth != nil {
		old := b.GetLineWidth()
		b.SetLineWidth(*lineWidth)
		defer func() { b.SetLineWidth(old) }()
	}

	if drawColor != nil {
		oldR, oldG, oldB := b.GetDrawColor()
		b.SetDrawColor(drawColor.R, drawColor.G, drawColor.B)
		defer func() { b.SetDrawColor(oldR, oldG, oldB) }()
	}

	if alphaOverride != nil {
		oldA, oldBMS := b.b.GetAlpha()
		b.b.SetAlpha(alphaOverride.V1, string(alphaOverride.V2))
		defer func() { b.b.SetAlpha(oldA, oldBMS) }()
	}

	b.b.SetLineCapStyle(string(capStyle))

	b.b.Line(x1, y1, x2, y2)
}
| @@ -1,6 +1,9 @@ | ||||
| package wpdf | ||||
|  | ||||
| import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| import ( | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/dataext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| ) | ||||
|  | ||||
| type PDFMultiCellOpt struct { | ||||
| 	width             *float64 | ||||
| @@ -11,11 +14,13 @@ type PDFMultiCellOpt struct { | ||||
| 	fontNameOverride  *PDFFontFamily | ||||
| 	fontStyleOverride *PDFFontStyle | ||||
| 	fontSizeOverride  *float64 | ||||
| 	alphaOverride     *dataext.Tuple[float64, PDFBlendMode] | ||||
| 	extraLn           *float64 | ||||
| 	x                 *float64 | ||||
| 	textColor         *PDFColor | ||||
| 	borderColor       *PDFColor | ||||
| 	fillColor         *PDFColor | ||||
| 	debug             *bool | ||||
| } | ||||
|  | ||||
| func NewPDFMultiCellOpt() *PDFMultiCellOpt { | ||||
| @@ -119,6 +124,21 @@ func (opt *PDFMultiCellOpt) FillColorHex(c uint32) *PDFMultiCellOpt { | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFMultiCellOpt) Alpha(alpha float64, blendMode PDFBlendMode) *PDFMultiCellOpt { | ||||
| 	opt.alphaOverride = &dataext.Tuple[float64, PDFBlendMode]{V1: alpha, V2: blendMode} | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFMultiCellOpt) Debug(v bool) *PDFMultiCellOpt { | ||||
| 	opt.debug = &v | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFMultiCellOpt) Copy() *PDFMultiCellOpt { | ||||
| 	c := *opt | ||||
| 	return &c | ||||
| } | ||||
|  | ||||
| func (b *WPDFBuilder) MultiCell(txt string, opts ...*PDFMultiCellOpt) { | ||||
|  | ||||
| 	txtTR := b.tr(txt) | ||||
| @@ -131,11 +151,13 @@ func (b *WPDFBuilder) MultiCell(txt string, opts ...*PDFMultiCellOpt) { | ||||
| 	var fontNameOverride *PDFFontFamily | ||||
| 	var fontStyleOverride *PDFFontStyle | ||||
| 	var fontSizeOverride *float64 | ||||
| 	var alphaOverride *dataext.Tuple[float64, PDFBlendMode] | ||||
| 	extraLn := float64(0) | ||||
| 	var x *float64 | ||||
| 	var textColor *PDFColor | ||||
| 	var borderColor *PDFColor | ||||
| 	var fillColor *PDFColor | ||||
| 	debug := b.debug | ||||
|  | ||||
| 	for _, opt := range opts { | ||||
| 		width = langext.Coalesce(opt.width, width) | ||||
| @@ -146,11 +168,13 @@ func (b *WPDFBuilder) MultiCell(txt string, opts ...*PDFMultiCellOpt) { | ||||
| 		fontNameOverride = langext.CoalesceOpt(opt.fontNameOverride, fontNameOverride) | ||||
| 		fontStyleOverride = langext.CoalesceOpt(opt.fontStyleOverride, fontStyleOverride) | ||||
| 		fontSizeOverride = langext.CoalesceOpt(opt.fontSizeOverride, fontSizeOverride) | ||||
| 		alphaOverride = langext.CoalesceOpt(opt.alphaOverride, alphaOverride) | ||||
| 		extraLn = langext.Coalesce(opt.extraLn, extraLn) | ||||
| 		x = langext.CoalesceOpt(opt.x, x) | ||||
| 		textColor = langext.CoalesceOpt(opt.textColor, textColor) | ||||
| 		borderColor = langext.CoalesceOpt(opt.borderColor, borderColor) | ||||
| 		fillColor = langext.CoalesceOpt(opt.fillColor, fillColor) | ||||
| 		debug = langext.Coalesce(opt.debug, debug) | ||||
| 	} | ||||
|  | ||||
| 	if fontNameOverride != nil || fontStyleOverride != nil || fontSizeOverride != nil { | ||||
| @@ -182,12 +206,24 @@ func (b *WPDFBuilder) MultiCell(txt string, opts ...*PDFMultiCellOpt) { | ||||
| 		defer func() { b.SetFillColor(oldColorR, oldColorG, oldColorB) }() | ||||
| 	} | ||||
|  | ||||
| 	if alphaOverride != nil { | ||||
| 		oldA, oldBMS := b.b.GetAlpha() | ||||
| 		b.b.SetAlpha(alphaOverride.V1, string(alphaOverride.V2)) | ||||
| 		defer func() { b.b.SetAlpha(oldA, oldBMS) }() | ||||
| 	} | ||||
|  | ||||
| 	if x != nil { | ||||
| 		b.b.SetX(*x) | ||||
| 	} | ||||
|  | ||||
| 	xBefore, yBefore := b.b.GetXY() | ||||
|  | ||||
| 	b.b.MultiCell(width, height, txtTR, string(border), string(align), fill) | ||||
|  | ||||
| 	if debug { | ||||
| 		b.Rect(b.GetPageWidth()-xBefore-b.GetMarginRight(), b.GetY()-yBefore, RectOutline, NewPDFRectOpt().X(xBefore).Y(yBefore).LineWidth(0.25).DrawColor(0, 128, 0)) | ||||
| 	} | ||||
|  | ||||
| 	if extraLn != 0 { | ||||
| 		b.b.Ln(extraLn) | ||||
| 	} | ||||
|   | ||||
| @@ -1,6 +1,9 @@ | ||||
| package wpdf | ||||
|  | ||||
| import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| import ( | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/dataext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| ) | ||||
|  | ||||
| type PDFRectOpt struct { | ||||
| 	x         *float64 | ||||
| @@ -8,10 +11,12 @@ type PDFRectOpt struct { | ||||
| 	lineWidth *float64 | ||||
| 	drawColor *PDFColor | ||||
| 	fillColor *PDFColor | ||||
| 	alpha     *dataext.Tuple[float64, PDFBlendMode] | ||||
| 	radiusTL  *float64 | ||||
| 	radiusTR  *float64 | ||||
| 	radiusBR  *float64 | ||||
| 	radiusBL  *float64 | ||||
| 	debug     *bool | ||||
| } | ||||
|  | ||||
| func NewPDFRectOpt() *PDFRectOpt { | ||||
| @@ -81,16 +86,28 @@ func (opt *PDFRectOpt) RadiusBR(radius float64) *PDFRectOpt { | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFRectOpt) Alpha(alpha float64, blendMode PDFBlendMode) *PDFRectOpt { | ||||
| 	opt.alpha = &dataext.Tuple[float64, PDFBlendMode]{V1: alpha, V2: blendMode} | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (opt *PDFRectOpt) Debug(v bool) *PDFRectOpt { | ||||
| 	opt.debug = &v | ||||
| 	return opt | ||||
| } | ||||
|  | ||||
| func (b *WPDFBuilder) Rect(w float64, h float64, styleStr PDFRectStyle, opts ...*PDFRectOpt) { | ||||
| 	x := b.GetX() | ||||
| 	y := b.GetY() | ||||
| 	var lineWidth *float64 | ||||
| 	var drawColor *PDFColor | ||||
| 	var fillColor *PDFColor | ||||
| 	var alphaOverride *dataext.Tuple[float64, PDFBlendMode] | ||||
| 	radiusTL := float64(0) | ||||
| 	radiusTR := float64(0) | ||||
| 	radiusBR := float64(0) | ||||
| 	radiusBL := float64(0) | ||||
| 	debug := b.debug | ||||
|  | ||||
| 	for _, opt := range opts { | ||||
| 		x = langext.Coalesce(opt.x, x) | ||||
| @@ -98,10 +115,12 @@ func (b *WPDFBuilder) Rect(w float64, h float64, styleStr PDFRectStyle, opts ... | ||||
| 		lineWidth = langext.CoalesceOpt(opt.lineWidth, lineWidth) | ||||
| 		drawColor = langext.CoalesceOpt(opt.drawColor, drawColor) | ||||
| 		fillColor = langext.CoalesceOpt(opt.fillColor, fillColor) | ||||
| 		alphaOverride = langext.CoalesceOpt(opt.alpha, alphaOverride) | ||||
| 		radiusTL = langext.Coalesce(opt.radiusTL, radiusTL) | ||||
| 		radiusTR = langext.Coalesce(opt.radiusTR, radiusTR) | ||||
| 		radiusBR = langext.Coalesce(opt.radiusBR, radiusBR) | ||||
| 		radiusBL = langext.Coalesce(opt.radiusBL, radiusBL) | ||||
| 		debug = langext.Coalesce(opt.debug, debug) | ||||
| 	} | ||||
|  | ||||
| 	if lineWidth != nil { | ||||
| @@ -122,5 +141,11 @@ func (b *WPDFBuilder) Rect(w float64, h float64, styleStr PDFRectStyle, opts ... | ||||
| 		defer func() { b.SetFillColor(oldR, oldG, oldB) }() | ||||
| 	} | ||||
|  | ||||
| 	if alphaOverride != nil { | ||||
| 		oldA, oldBMS := b.b.GetAlpha() | ||||
| 		b.b.SetAlpha(alphaOverride.V1, string(alphaOverride.V2)) | ||||
| 		defer func() { b.b.SetAlpha(oldA, oldBMS) }() | ||||
| 	} | ||||
|  | ||||
| 	b.b.RoundedRectExt(x, y, w, h, radiusTL, radiusTR, radiusBR, radiusBL, string(styleStr)) | ||||
| } | ||||
|   | ||||
							
								
								
									
										346
									
								
								wpdf/wpdfTable.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										346
									
								
								wpdf/wpdfTable.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,346 @@ | ||||
| package wpdf | ||||
|  | ||||
| import ( | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||
| 	"regexp" | ||||
| 	"strconv" | ||||
| ) | ||||
|  | ||||
| // Column specifier: | ||||
| // | ||||
| // - `{number}`:             Use this amount of space | ||||
| // - `auto`:                 Use the needed space for the content | ||||
| // - `*` / `fr`:             Use the remaining space, evenly distributed, shrink down to auto | ||||
| // - `{num}fr` / `{num}*`:   Use the remaining space, evenly distributed (weighted), shrink down to auto | ||||
| // | ||||
| // # TableBuilder | ||||
| //    - PadX/PadY:            Padding between cells | ||||
| //    - DefaultStyle:         Default style for cells | ||||
| // | ||||
| // # TableCellStyleOpt | ||||
| //    - MultiCell:            Use wpdf.MultiCell() instead of wpdf.Cell()  --> supports linebreaks | ||||
| //    - Ellipsize:            Ellipsize text if too long | ||||
| //    - PaddingHorz:          Additional horizontal padding inside of cell to space text around | ||||
| //    - PDFCellOpt:           Normal styling options (evtl not all are supported, depending on MultiCell: true/false) | ||||
|  | ||||
| var regexTableColumnSpecFr = rext.W(regexp.MustCompile(`^(?P<num>[0-9]*)(fr|\*)$`)) | ||||
|  | ||||
// TableBuilder accumulates rows and layout settings for a table and renders
// them via Build. Obtain one through WPDFBuilder.Table().
type TableBuilder struct {
	builder *WPDFBuilder // parent PDF builder used for all drawing

	padx             float64 // horizontal gap between cells
	pady             float64 // vertical gap between rows
	rows             []tableRow
	defaultCellStyle *TableCellStyleOpt // style used by AddRowDefaultStyle
	columnWidths     *[]string          // column specifiers (see file-header docs); nil = every column "*"
	debug            *bool              // nil = inherit the parent builder's debug flag
}
|  | ||||
| func (r tableRow) maxFontSize(defaultFontSize float64) float64 { | ||||
| 	mfs := defaultFontSize | ||||
| 	for _, cell := range r.cells { | ||||
| 		if cell.Style.fontSizeOverride != nil { | ||||
| 			mfs = max(mfs, *cell.Style.fontSizeOverride) | ||||
| 		} | ||||
| 	} | ||||
| 	return mfs | ||||
| } | ||||
|  | ||||
| func (b *TableBuilder) Widths(v ...string) *TableBuilder { | ||||
| 	b.columnWidths = &v | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func (b *TableBuilder) DefaultStyle(s *TableCellStyleOpt) *TableBuilder { | ||||
| 	b.defaultCellStyle = s | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func (b *TableBuilder) PadX(v float64) *TableBuilder { | ||||
| 	b.padx = v | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func (b *TableBuilder) PadY(v float64) *TableBuilder { | ||||
| 	b.pady = v | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func (b *TableBuilder) AddRow(cells ...TableCell) *TableBuilder { | ||||
| 	b.rows = append(b.rows, tableRow{cells: cells}) | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func (b *TableBuilder) AddRowWithStyle(style *TableCellStyleOpt, cells ...string) *TableBuilder { | ||||
| 	tcels := make([]TableCell, 0, len(cells)) | ||||
| 	for _, cell := range cells { | ||||
| 		tcels = append(tcels, TableCell{Content: cell, Style: *style}) | ||||
| 	} | ||||
|  | ||||
| 	b.rows = append(b.rows, tableRow{cells: tcels}) | ||||
|  | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func (b *TableBuilder) AddRowDefaultStyle(cells ...string) *TableBuilder { | ||||
| 	tcels := make([]TableCell, 0, len(cells)) | ||||
| 	for _, cell := range cells { | ||||
| 		tcels = append(tcels, TableCell{Content: cell, Style: langext.Coalesce(b.defaultCellStyle, TableCellStyleOpt{})}) | ||||
| 	} | ||||
|  | ||||
| 	b.rows = append(b.rows, tableRow{cells: tcels}) | ||||
|  | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func (b *TableBuilder) BuildRow() *TableRowBuilder { | ||||
| 	return &TableRowBuilder{tabbuilder: b, cells: make([]TableCell, 0)} | ||||
| } | ||||
|  | ||||
// Build renders all accumulated rows into the PDF at the current cursor
// position. Column widths are resolved via calculateColumns; every row must
// have exactly as many cells as there are columns, otherwise an error is set
// on the underlying FPDF object and rendering is aborted.
//
// Auto page break is disabled for the duration of the call (restored via
// defer) and page breaks are inserted manually between rows instead, so a
// row is never split across pages.
func (b *TableBuilder) Build() {
	builder := b.builder

	debug := langext.Coalesce(b.debug, b.builder.debug)

	if len(b.rows) == 0 {
		return // nothing to do
	}

	_, pageHeight := builder.FPDF().GetPageSize()
	pbEnabled, pbMargin := builder.FPDF().GetAutoPageBreak()

	builder.FPDF().SetAutoPageBreak(false, 0) // manually handle pagebreak in tables
	defer func() { builder.FPDF().SetAutoPageBreak(pbEnabled, pbMargin) }()

	columnWidths := b.calculateColumns()

	columnCount := len(columnWidths)

	// validate: every row must match the resolved column count
	for i, dat := range b.rows {
		if len(dat.cells) != columnCount {
			err := exerr.New(exerr.TypeInternal, "data must have the same length as header").
				Int("idx", i).
				Strs("cells", langext.ArrMap(dat.cells, func(v TableCell) string { return v.Content })).
				Strs("colWidths", langext.Coalesce(b.columnWidths, nil)).
				Build()
			builder.FPDF().SetError(err)
			return
		}
	}

	defaultFontSize, _ := builder.FPDF().GetFontSize()

	for rowIdx, row := range b.rows {
		// nextY tracks the bottom edge of the tallest cell in this row
		nextY := builder.GetY()
		for cellIdx, cell := range row.cells {

			str := cell.Content
			style := cell.Style

			ellipsize := langext.Coalesce(style.ellipsize, true)
			cellPaddingHorz := langext.Coalesce(style.paddingHorz, 2)

			fillHeight := langext.Coalesce(style.fillHeight, false)

			bx := builder.GetX()
			by := builder.GetY()

			cellWidth := columnWidths[cellIdx]

			_ = fillHeight // TODO implement, but how?? ( cells with fillHeight=true should have a border of the full column height, even if another column is growing it, but we do not know the height beforehand ... )

			if langext.Coalesce(style.multiCell, true) {

				// MultiCell supports line breaks; height grows with content
				builder.MultiCell(str, style.PDFCellOpt.Copy().ToMulti().Width(cellWidth).Debug(debug))

			} else {

				// single-line cell: optionally ellipsize overlong content
				if ellipsize {
					if builder.GetStringWidth(str, style.PDFCellOpt) > (cellWidth - cellPaddingHorz) {
						for builder.GetStringWidth(str+"...", style.PDFCellOpt) > (cellWidth-cellPaddingHorz) && len(str) > 0 {
							str = str[:len(str)-1]
						}
						str += "..."
					}
				}

				builder.Cell(str, style.PDFCellOpt.Copy().Width(cellWidth).Debug(debug))

			}

			nextY = max(nextY, builder.GetY())
			// move the cursor to the start of the next cell in the same row
			builder.SetXY(bx+cellWidth+b.padx, by)
		}
		builder.SetY(nextY + b.pady)

		// manual page break: if the next row would not fit, start a new page
		if rowIdx < len(b.rows)-1 && pbEnabled && (builder.GetY()+b.rows[rowIdx+1].maxFontSize(defaultFontSize)) > (pageHeight-pbMargin) {
			builder.FPDF().AddPage()
		}
	}

}
|  | ||||
| func (b *TableBuilder) calculateColumns() []float64 { | ||||
| 	pageWidthTotal, _ := b.builder.FPDF().GetPageSize() | ||||
| 	marginLeft, _, marginRight, _ := b.builder.FPDF().GetMargins() | ||||
| 	pageWidth := pageWidthTotal - marginLeft - marginRight | ||||
|  | ||||
| 	columnDef := make([]string, 0) | ||||
|  | ||||
| 	if b.columnWidths != nil { | ||||
| 		columnDef = *b.columnWidths | ||||
| 	} else if len(b.rows) > 0 { | ||||
| 		columnDef = make([]string, len(b.rows[0].cells)) | ||||
| 		for i := range columnDef { | ||||
| 			columnDef[i] = "*" | ||||
| 		} | ||||
| 	} else { | ||||
| 		return []float64{} | ||||
| 	} | ||||
|  | ||||
| 	columnWidths := make([]float64, len(columnDef)) | ||||
|  | ||||
| 	frColumnWidthCount := 0 | ||||
| 	frColumnWeights := make([]float64, len(columnDef)) | ||||
| 	remainingWidth := pageWidth - (float64(len(columnDef)-1) * b.padx) | ||||
| 	autoWidths := make([]float64, len(columnDef)) | ||||
|  | ||||
| 	for colIdx := range columnDef { | ||||
| 		w := float64(0) | ||||
| 		for _, row := range b.rows { | ||||
| 			if len(row.cells) > colIdx { | ||||
| 				w = max(w, b.builder.GetStringWidth(row.cells[colIdx].Content, row.cells[colIdx].Style.PDFCellOpt)) | ||||
| 			} | ||||
| 		} | ||||
| 		autoWidths[colIdx] = w | ||||
| 	} | ||||
|  | ||||
| 	for colIdx, col := range columnDef { | ||||
|  | ||||
| 		maxPadHorz := float64(0) | ||||
|  | ||||
| 		minWidth := float64(0) | ||||
| 		for _, row := range b.rows { | ||||
| 			if len(row.cells) > colIdx { | ||||
|  | ||||
| 				ph := langext.Coalesce(row.cells[colIdx].Style.paddingHorz, 2) | ||||
| 				mw := langext.Coalesce(row.cells[colIdx].Style.minWidth, 0) | ||||
|  | ||||
| 				minWidth = max(minWidth, ph+mw) | ||||
|  | ||||
| 				maxPadHorz = max(maxPadHorz, ph) | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		if col == "auto" { | ||||
|  | ||||
| 			w := max(autoWidths[colIdx]+maxPadHorz, minWidth) | ||||
|  | ||||
| 			columnWidths[colIdx] = w | ||||
| 			remainingWidth -= w | ||||
|  | ||||
| 		} else if match, ok := regexTableColumnSpecFr.MatchFirst(col); ok { | ||||
|  | ||||
| 			if match.GroupByName("num").Value() == "" { | ||||
| 				w := minWidth | ||||
|  | ||||
| 				frColumnWidthCount += 1 | ||||
| 				frColumnWeights[colIdx] = 1 | ||||
| 				columnWidths[colIdx] = w | ||||
| 				remainingWidth -= w | ||||
| 			} else { | ||||
| 				w := minWidth | ||||
|  | ||||
| 				n, _ := strconv.Atoi(match.GroupByName("num").Value()) | ||||
| 				frColumnWidthCount += n | ||||
| 				frColumnWeights[colIdx] = float64(n) | ||||
| 				columnWidths[colIdx] = w | ||||
| 				remainingWidth -= w | ||||
| 			} | ||||
|  | ||||
| 		} else { | ||||
|  | ||||
| 			if w, err := strconv.ParseFloat(col, 64); err == nil { | ||||
| 				w = max(w, minWidth) | ||||
|  | ||||
| 				columnWidths[colIdx] = w | ||||
| 				remainingWidth -= w | ||||
| 			} else { | ||||
| 				b.builder.FPDF().SetError(exerr.New(exerr.TypeInternal, "invalid column width").Str("width", col).Build()) | ||||
| 				w = max(w, minWidth) | ||||
|  | ||||
| 				columnWidths[colIdx] = w | ||||
| 				remainingWidth -= w | ||||
| 				return nil | ||||
| 			} | ||||
|  | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if remainingWidth < 0 { | ||||
| 		// no remaining space to distribute | ||||
| 		return columnWidths | ||||
| 	} | ||||
|  | ||||
| 	{ | ||||
| 		rmSub := 0.0 | ||||
| 		for i := range columnDef { | ||||
| 			if frColumnWeights[i] != 0 { | ||||
| 				w := min(autoWidths[i], (remainingWidth/float64(frColumnWidthCount))*frColumnWeights[i]) | ||||
| 				rmSub += w - columnWidths[i] | ||||
| 				columnWidths[i] = w | ||||
| 			} | ||||
| 		} | ||||
| 		remainingWidth -= rmSub | ||||
| 	} | ||||
|  | ||||
| 	if remainingWidth > 0.01 { | ||||
| 		rmSub := 0.0 | ||||
| 		for i, _ := range columnDef { | ||||
| 			if frColumnWeights[i] != 0 { | ||||
| 				addW := (remainingWidth / float64(frColumnWidthCount)) * frColumnWeights[i] | ||||
| 				rmSub += addW | ||||
| 				columnWidths[i] += addW | ||||
| 			} | ||||
| 		} | ||||
| 		remainingWidth -= rmSub | ||||
| 	} | ||||
|  | ||||
| 	return columnWidths | ||||
| } | ||||
|  | ||||
| func (b *TableBuilder) RowCount() int { | ||||
| 	return len(b.rows) | ||||
| } | ||||
|  | ||||
| func (b *TableBuilder) Debug(v bool) *TableBuilder { | ||||
| 	b.debug = &v | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func (b *WPDFBuilder) Table() *TableBuilder { | ||||
| 	return &TableBuilder{ | ||||
| 		builder:          b, | ||||
| 		rows:             make([]tableRow, 0), | ||||
| 		pady:             2, | ||||
| 		padx:             2, | ||||
| 		defaultCellStyle: defaultTableStyle(), | ||||
| 	} | ||||
| } | ||||
|  | ||||
// defaultTableStyle returns the cell style used when no explicit default is
// configured: 8pt font, full border (#666666), light-grey fill (#F0F0F0),
// black text, single-line (non-multiCell) ellipsized cells with a minimum
// width of 5 units.
func defaultTableStyle() *TableCellStyleOpt {
	return &TableCellStyleOpt{
		PDFCellOpt: *NewPDFCellOpt().
			FontSize(float64(8)).
			Border(BorderFull).
			BorderColorHex(uint32(0x666666)).
			FillColorHex(uint32(0xF0F0F0)).
			TextColorHex(uint32(0x000000)).
			FillBackground(true),
		minWidth:  langext.Ptr(float64(5)),
		ellipsize: langext.PTrue,
		multiCell: langext.PFalse,
	}
}
							
								
								
									
										187
									
								
								wpdf/wpdfTableCell.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										187
									
								
								wpdf/wpdfTableCell.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,187 @@ | ||||
| package wpdf | ||||
|  | ||||
| import ( | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/dataext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| ) | ||||
|  | ||||
// TableCell is a single cell of a table: its text content together with the
// style options used when rendering it.
type TableCell struct {
	Content string            // the text shown in the cell
	Style   TableCellStyleOpt // how the cell is rendered (borders, colors, wrapping, ...)
}
|  | ||||
// TableCellStyleOpt bundles the table-specific render options of one cell
// (wrapping, ellipsizing, padding, minimum width, height filling) with the
// embedded generic PDFCellOpt options. All option fields are pointers so an
// unset option can be told apart from an explicitly chosen value.
type TableCellStyleOpt struct {
	multiCell   *bool    // render via multi-cell (line-wrapping) instead of a single cell — see the MultiCell rows in wpdf_test.go
	ellipsize   *bool    // shorten over-long single-line content (see the "TrimMe..." case in wpdf_test.go)
	paddingHorz *float64 // horizontal padding inside the cell
	minWidth    *float64 // lower bound for the computed column width
	fillHeight  *bool    // presumably stretches the cell to the full row height — verify in the table renderer

	PDFCellOpt
}
|  | ||||
| func NewTableCellStyleOpt() *TableCellStyleOpt { | ||||
| 	return &TableCellStyleOpt{} | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) FillHeight(b bool) *TableCellStyleOpt { | ||||
| 	o.fillHeight = &b | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) MultiCell(v bool) *TableCellStyleOpt { | ||||
| 	o.multiCell = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) Ellipsize(v bool) *TableCellStyleOpt { | ||||
| 	o.ellipsize = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) PaddingHorz(v float64) *TableCellStyleOpt { | ||||
| 	o.paddingHorz = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) MinWidth(v float64) *TableCellStyleOpt { | ||||
| 	o.minWidth = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) CellStyle(v PDFCellOpt) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt = v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) Width(v float64) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.width = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) Height(v float64) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.height = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) Border(v PDFBorder) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.border = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) LnPos(v PDFTextBreak) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.ln = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) Align(v PDFTextAlign) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.align = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) FillBackground(v bool) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.fill = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) Link(v int) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.link = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) LinkStr(v string) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.linkStr = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) Font(fontName PDFFontFamily, fontStyle PDFFontStyle, fontSize float64) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.fontNameOverride = &fontName | ||||
| 	o.PDFCellOpt.fontStyleOverride = &fontStyle | ||||
| 	o.PDFCellOpt.fontSizeOverride = &fontSize | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) FontName(v PDFFontFamily) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.fontNameOverride = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) FontStyle(v PDFFontStyle) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.fontStyleOverride = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) FontSize(v float64) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.fontSizeOverride = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) Bold() *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.fontStyleOverride = langext.Ptr(Bold) | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) Italic() *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.fontStyleOverride = langext.Ptr(Italic) | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) LnAfter(v float64) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.extraLn = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) X(v float64) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.x = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) AutoWidth() *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.autoWidth = langext.PTrue | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) AutoWidthPaddingX(v float64) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.autoWidthPaddingX = &v | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) TextColor(cr, cg, cb int) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.textColor = langext.Ptr(rgbToColor(cr, cg, cb)) | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) TextColorHex(c uint32) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.textColor = langext.Ptr(hexToColor(c)) | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) BorderColor(cr, cg, cb int) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.borderColor = langext.Ptr(rgbToColor(cr, cg, cb)) | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) BorderColorHex(c uint32) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.borderColor = langext.Ptr(hexToColor(c)) | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) FillColor(cr, cg, cb int) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.fillColor = langext.Ptr(rgbToColor(cr, cg, cb)) | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) FillColorHex(c uint32) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.fillColor = langext.Ptr(hexToColor(c)) | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) Alpha(alpha float64, blendMode PDFBlendMode) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.alphaOverride = &dataext.Tuple[float64, PDFBlendMode]{V1: alpha, V2: blendMode} | ||||
| 	return o | ||||
| } | ||||
|  | ||||
| func (o *TableCellStyleOpt) Debug(v bool) *TableCellStyleOpt { | ||||
| 	o.PDFCellOpt.debug = &v | ||||
| 	return o | ||||
| } | ||||
							
								
								
									
										52
									
								
								wpdf/wpdfTableRow.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										52
									
								
								wpdf/wpdfTableRow.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,52 @@ | ||||
| package wpdf | ||||
|  | ||||
| import "gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
|  | ||||
// tableRow is one finished row of a table: the ordered list of its cells.
type tableRow struct {
	cells []TableCell
}
|  | ||||
// TableRowBuilder collects the cells of a single row via a fluent API and
// hands the finished row back to its parent TableBuilder (see BuildRow).
type TableRowBuilder struct {
	tabbuilder   *TableBuilder      // parent builder the finished row is added to
	defaultStyle *TableCellStyleOpt // optional per-row default style; nil means fall back to the table default
	cells        []TableCell        // cells accumulated so far, in order
}
|  | ||||
| func (r *TableRowBuilder) RowStyle(style *TableCellStyleOpt) *TableRowBuilder { | ||||
| 	r.defaultStyle = style | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| func (r *TableRowBuilder) Cell(cell string) *TableRowBuilder { | ||||
| 	r.cells = append(r.cells, TableCell{Content: cell, Style: langext.Coalesce3(r.defaultStyle, r.tabbuilder.defaultCellStyle, TableCellStyleOpt{})}) | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| func (r *TableRowBuilder) Cells(cells ...string) *TableRowBuilder { | ||||
| 	for _, cell := range cells { | ||||
| 		r.cells = append(r.cells, TableCell{Content: cell, Style: langext.Coalesce3(r.defaultStyle, r.tabbuilder.defaultCellStyle, TableCellStyleOpt{})}) | ||||
| 	} | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| func (r *TableRowBuilder) CellObject(cell TableCell) *TableRowBuilder { | ||||
| 	r.cells = append(r.cells, cell) | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| func (r *TableRowBuilder) CellObjects(cells ...TableCell) *TableRowBuilder { | ||||
| 	for _, cell := range cells { | ||||
| 		r.cells = append(r.cells, cell) | ||||
| 	} | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| func (r *TableRowBuilder) CellWithStyle(cell string, style *TableCellStyleOpt) *TableRowBuilder { | ||||
| 	r.cells = append(r.cells, TableCell{Content: cell, Style: *style}) | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| func (r *TableRowBuilder) BuildRow() *TableBuilder { | ||||
| 	r.tabbuilder.AddRow(r.cells...) | ||||
| 	return r.tabbuilder | ||||
| } | ||||
							
								
								
									
										105
									
								
								wpdf/wpdf_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										105
									
								
								wpdf/wpdf_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,105 @@ | ||||
| package wpdf | ||||
|  | ||||
| import ( | ||||
| 	_ "embed" | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/imageext" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"os" | ||||
| 	"path" | ||||
| 	"testing" | ||||
| ) | ||||
|  | ||||
// logoData holds the raw bytes of logo.png, embedded at compile time and used
// as the image fixture in TestPDFBuilder.
//
//go:embed logo.png
var logoData []byte
|  | ||||
// TestPDFBuilder exercises the builder API end-to-end: fonts, single cells,
// auto-width cells, multi-cells, an embedded image, and two tables (one built
// row-by-row via AddRow*, one cell-by-cell via BuildRow), then renders the
// result to wpdf_test.pdf for manual visual inspection.
func TestPDFBuilder(t *testing.T) {
	builder := NewPDFBuilder(Portrait, SizeA4, true)

	builder.Debug(true)

	logoRef := builder.RegisterImage(logoData)

	builder.SetMargins(PDFMargins{Left: 15, Top: 40, Right: 10})
	builder.AddPage()

	// headline in bold 20pt
	builder.SetFont(FontHelvetica, Normal, 10)
	builder.Cell("Neueinrichtung deiner Entgeltumwandlung", NewPDFCellOpt().Bold().FontSize(20))
	builder.Ln(10)

	// two cells on one line: a fixed-width centered cell and an auto-width cell
	builder.SetFont(FontHelvetica, Normal, 10)
	builder.Cell("Hello World", NewPDFCellOpt().Width(50).Align(AlignHorzCenter).LnPos(BreakToRight))
	builder.IncX(10)
	builder.Cell("Second Text", NewPDFCellOpt().AutoWidth().AutoWidthPaddingX(2).LnPos(BreakToRight))
	builder.Ln(10)

	// a wrapping paragraph
	builder.MultiCell("Im Fall einer individuellen Entgeltumwandlung ist die Zuschussverpflichtung auf der Grundlage des Betriebsrentenstärkungsgesetzes in der gesetzlich vorgeschriebenen Höhe (§ 1a Abs. 1a BetrAVG), über den arbeitgeberfinanzierten Zuschuss erfüllt.")
	builder.Ln(4)

	// embedded logo placed at an absolute position, letter-boxed into 70x30
	builder.Image(logoRef, NewPDFImageOpt().X(90).Y(160).Width(70).Height(30).ImageFit(imageext.ImageFitContainCenter))

	builder.Ln(4)

	// header style: grey background, centered bold text, ellipsized single line
	cellStyleHeader := &TableCellStyleOpt{
		PDFCellOpt: *NewPDFCellOpt().
			FontSize(float64(8)).
			BorderColorHex(uint32(0x666666)).
			Border(BorderFull).
			FillColorHex(uint32(0xC0C0C0)).
			FillBackground(true).
			TextColorHex(uint32(0x000000)).
			Align(AlignHorzCenter).
			Bold(),
		minWidth:  langext.Ptr(float64(5)),
		ellipsize: langext.PTrue,
		multiCell: langext.PFalse,
	}

	// multi-line style: purple background, wrapping enabled instead of ellipsizing
	cellStyleMulti := &TableCellStyleOpt{
		PDFCellOpt: *NewPDFCellOpt().
			FontSize(float64(8)).
			BorderColorHex(uint32(0x666666)).
			Border(BorderFull).
			FillColorHex(uint32(0xC060C0)).
			FillBackground(true).
			TextColorHex(uint32(0x000000)),
		minWidth:  langext.Ptr(float64(5)),
		ellipsize: langext.PFalse,
		multiCell: langext.PTrue,
	}

	// table 1: rows added wholesale; mixes auto, fixed, fractional and fixed widths
	builder.Table().
		Widths("auto", "20", "1fr", "20").
		PadX(2).
		PadY(2).
		AddRowWithStyle(cellStyleHeader, "test", "hello", "123", "end").
		AddRowDefaultStyle("test", "hello", "123", "end").
		AddRowDefaultStyle("123", "helasdsalo", "a", "enwqad").
		AddRowDefaultStyle("123asd", "TrimMeTrimMeTrimMeTrimMe", "a", "enwqad").
		AddRowWithStyle(cellStyleMulti, "123", "helasdsalo", "a", "MultiCell: enwqad enw\nqad enwqad enwqad enwqad enwqad").
		AddRowDefaultStyle("123", "helasdsalo", "a", "enwqad").
		Debug(false).
		Build()

	builder.Ln(8)

	// table 2: rows built cell-by-cell through the TableRowBuilder fluent API
	builder.Table().
		Widths("auto", "20", "1fr", "20").
		PadX(2).
		PadY(2).
		BuildRow().RowStyle(cellStyleHeader).Cells("test", "hello", "123", "end").BuildRow().
		BuildRow().Cells("test", "hello", "123", "end").BuildRow().
		BuildRow().RowStyle(cellStyleMulti.FillHeight(true)).Cell("123").Cell("helasdsalo").Cell("a").Cell("MultiCell: enwqad enw\nqad enwqad enwqad enwqad enwqad").BuildRow().
		AddRowDefaultStyle("123", "helasdsalo", "a", "enwqad").
		Debug(false).
		Build()

	bin, err := builder.Build()
	if err != nil {
		t.Fatal(err)
	}

	// best-effort write: the PDF exists only for manual inspection, so a
	// failed write is deliberately not fatal to the test
	fn := "wpdf_test.pdf"
	_ = os.WriteFile(fn, bin, 0644)
	fmt.Println("file://" + path.Join(langext.Must(os.Getwd()), fn))
}
		Reference in New Issue
	
	Block a user