Compare commits

11 Commits

v0.0.526 ... cursortoke
| SHA1 |
|---|
| e154137105 |
| 9b9a79b4ad |
| 5a8d7110e4 |
| d47c84cd47 |
| c571f3f888 |
| e884ba6b89 |
| 1a8e31e5ef |
| eccc0fe9e5 |
| c8dec24a0d |
| b8cb989e54 |
| ec672fbd49 |
| @@ -3,12 +3,16 @@ package cursortoken | |||||||
| import ( | import ( | ||||||
| 	"encoding/base32" | 	"encoding/base32" | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/primitive" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"strconv" | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | type CursorToken interface { | ||||||
|  | 	Token() string | ||||||
|  | } | ||||||
|  |  | ||||||
| type Mode string | type Mode string | ||||||
|  |  | ||||||
| const ( | const ( | ||||||
| @@ -24,97 +28,6 @@ type Extra struct { | |||||||
| 	PageSize  *int | 	PageSize  *int | ||||||
| } | } | ||||||
|  |  | ||||||
| type CursorToken struct { |  | ||||||
| 	Mode               Mode |  | ||||||
| 	ValuePrimary       string |  | ||||||
| 	ValueSecondary     string |  | ||||||
| 	Direction          SortDirection |  | ||||||
| 	DirectionSecondary SortDirection |  | ||||||
| 	PageSize           int |  | ||||||
| 	Extra              Extra |  | ||||||
| } |  | ||||||
|  |  | ||||||
| type cursorTokenSerialize struct { |  | ||||||
| 	ValuePrimary       *string        `json:"v1,omitempty"` |  | ||||||
| 	ValueSecondary     *string        `json:"v2,omitempty"` |  | ||||||
| 	Direction          *SortDirection `json:"dir,omitempty"` |  | ||||||
| 	DirectionSecondary *SortDirection `json:"dir2,omitempty"` |  | ||||||
| 	PageSize           *int           `json:"size,omitempty"` |  | ||||||
|  |  | ||||||
| 	ExtraTimestamp *time.Time `json:"ts,omitempty"` |  | ||||||
| 	ExtraId        *string    `json:"id,omitempty"` |  | ||||||
| 	ExtraPage      *int       `json:"pg,omitempty"` |  | ||||||
| 	ExtraPageSize  *int       `json:"sz,omitempty"` |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func Start() CursorToken { |  | ||||||
| 	return CursorToken{ |  | ||||||
| 		Mode:               CTMStart, |  | ||||||
| 		ValuePrimary:       "", |  | ||||||
| 		ValueSecondary:     "", |  | ||||||
| 		Direction:          "", |  | ||||||
| 		DirectionSecondary: "", |  | ||||||
| 		PageSize:           0, |  | ||||||
| 		Extra:              Extra{}, |  | ||||||
| 	} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func End() CursorToken { |  | ||||||
| 	return CursorToken{ |  | ||||||
| 		Mode:               CTMEnd, |  | ||||||
| 		ValuePrimary:       "", |  | ||||||
| 		ValueSecondary:     "", |  | ||||||
| 		Direction:          "", |  | ||||||
| 		DirectionSecondary: "", |  | ||||||
| 		PageSize:           0, |  | ||||||
| 		Extra:              Extra{}, |  | ||||||
| 	} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *CursorToken) Token() string { |  | ||||||
| 	if c.Mode == CTMStart { |  | ||||||
| 		return "@start" |  | ||||||
| 	} |  | ||||||
| 	if c.Mode == CTMEnd { |  | ||||||
| 		return "@end" |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	// We kinda manually implement omitempty for the CursorToken here |  | ||||||
| 	// because omitempty does not work for time.Time and otherwise we would always |  | ||||||
| 	// get weird time values when decoding a token that initially didn't have an Timestamp set |  | ||||||
| 	// For this usecase we treat Unix=0 as an empty timestamp |  | ||||||
|  |  | ||||||
| 	sertok := cursorTokenSerialize{} |  | ||||||
|  |  | ||||||
| 	if c.ValuePrimary != "" { |  | ||||||
| 		sertok.ValuePrimary = &c.ValuePrimary |  | ||||||
| 	} |  | ||||||
| 	if c.ValueSecondary != "" { |  | ||||||
| 		sertok.ValueSecondary = &c.ValueSecondary |  | ||||||
| 	} |  | ||||||
| 	if c.Direction != "" { |  | ||||||
| 		sertok.Direction = &c.Direction |  | ||||||
| 	} |  | ||||||
| 	if c.DirectionSecondary != "" { |  | ||||||
| 		sertok.DirectionSecondary = &c.DirectionSecondary |  | ||||||
| 	} |  | ||||||
| 	if c.PageSize != 0 { |  | ||||||
| 		sertok.PageSize = &c.PageSize |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	sertok.ExtraTimestamp = c.Extra.Timestamp |  | ||||||
| 	sertok.ExtraId = c.Extra.Id |  | ||||||
| 	sertok.ExtraPage = c.Extra.Page |  | ||||||
| 	sertok.ExtraPageSize = c.Extra.PageSize |  | ||||||
|  |  | ||||||
| 	body, err := json.Marshal(sertok) |  | ||||||
| 	if err != nil { |  | ||||||
| 		panic(err) |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return "tok_" + base32.StdEncoding.EncodeToString(body) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func Decode(tok string) (CursorToken, error) { | func Decode(tok string) (CursorToken, error) { | ||||||
| 	if tok == "" { | 	if tok == "" { | ||||||
| 		return Start(), nil | 		return Start(), nil | ||||||
| @@ -125,23 +38,31 @@ func Decode(tok string) (CursorToken, error) { | |||||||
| 	if strings.ToLower(tok) == "@end" { | 	if strings.ToLower(tok) == "@end" { | ||||||
| 		return End(), nil | 		return End(), nil | ||||||
| 	} | 	} | ||||||
|  | 	if strings.ToLower(tok) == "$end" { | ||||||
| 	if !strings.HasPrefix(tok, "tok_") { | 		return PageEnd(), nil | ||||||
| 		return CursorToken{}, exerr.New(exerr.TypeCursorTokenDecode, "could not decode token, missing prefix").Str("token", tok).Build() |  | ||||||
| 	} | 	} | ||||||
|  | 	if strings.HasPrefix(tok, "$") && len(tok) > 1 { | ||||||
|  | 		n, err := strconv.ParseInt(tok[1:], 10, 64) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).WithType(exerr.TypeCursorTokenDecode).Build() | ||||||
|  | 		} | ||||||
|  | 		return Page(int(n)), nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if strings.HasPrefix(tok, "tok_") { | ||||||
|  |  | ||||||
| 		body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):]) | 		body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):]) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 		return CursorToken{}, err | 			return nil, err | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 	var tokenDeserialize cursorTokenSerialize | 		var tokenDeserialize cursorTokenKeySortSerialize | ||||||
| 		err = json.Unmarshal(body, &tokenDeserialize) | 		err = json.Unmarshal(body, &tokenDeserialize) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 		return CursorToken{}, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).Build() | 			return nil, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).WithType(exerr.TypeCursorTokenDecode).Build() | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 	token := CursorToken{Mode: CTMNormal} | 		token := CTKeySort{Mode: CTMNormal} | ||||||
|  |  | ||||||
| 		if tokenDeserialize.ValuePrimary != nil { | 		if tokenDeserialize.ValuePrimary != nil { | ||||||
| 			token.ValuePrimary = *tokenDeserialize.ValuePrimary | 			token.ValuePrimary = *tokenDeserialize.ValuePrimary | ||||||
| @@ -165,20 +86,8 @@ func Decode(tok string) (CursorToken, error) { | |||||||
| 		token.Extra.PageSize = tokenDeserialize.ExtraPageSize | 		token.Extra.PageSize = tokenDeserialize.ExtraPageSize | ||||||
|  |  | ||||||
| 		return token, nil | 		return token, nil | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *CursorToken) ValuePrimaryObjectId() (primitive.ObjectID, bool) { |  | ||||||
| 	if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil { |  | ||||||
| 		return oid, true |  | ||||||
| 	} else { | 	} else { | ||||||
| 		return primitive.ObjectID{}, false | 		return nil, exerr.New(exerr.TypeCursorTokenDecode, "could not decode token, missing/unknown prefix").Str("token", tok).Build() | ||||||
| 	} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (c *CursorToken) ValueSecondaryObjectId() (primitive.ObjectID, bool) { |  | ||||||
| 	if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil { |  | ||||||
| 		return oid, true |  | ||||||
| 	} else { |  | ||||||
| 		return primitive.ObjectID{}, false |  | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|   | |||||||
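With CursorToken now an interface and Decode dispatching on the token prefix, a round-trip looks roughly like the sketch below. The cursortoken import path is an assumption (only the exerr path is shown in this diff); the prefixes and return types are taken from the code above.

```go
package main

import (
	"fmt"

	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" // assumed import path
)

func main() {
	// "@end" still decodes to the key-sort end token.
	end, _ := ct.Decode("@end")
	fmt.Println(end.Token()) // @end

	// The new "$<page>" and "$end" prefixes decode to the page-based token.
	page, _ := ct.Decode("$3")
	fmt.Println(page.Token()) // $3

	pend, _ := ct.Decode("$end")
	fmt.Println(pend.Token()) // $end

	// Anything without a known prefix is now a TypeCursorTokenDecode error.
	if _, err := ct.Decode("not-a-token"); err != nil {
		fmt.Println("decode failed:", err)
	}
}
```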
							
								
								
									
cursortoken/tokenKeySort.go (new file, 128 lines)
							| @@ -0,0 +1,128 @@ | |||||||
|  | package cursortoken | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"encoding/base32" | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/primitive" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type CTKeySort struct { | ||||||
|  | 	Mode               Mode | ||||||
|  | 	ValuePrimary       string | ||||||
|  | 	ValueSecondary     string | ||||||
|  | 	Direction          SortDirection | ||||||
|  | 	DirectionSecondary SortDirection | ||||||
|  | 	PageSize           int | ||||||
|  | 	Extra              Extra | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type cursorTokenKeySortSerialize struct { | ||||||
|  | 	ValuePrimary       *string        `json:"v1,omitempty"` | ||||||
|  | 	ValueSecondary     *string        `json:"v2,omitempty"` | ||||||
|  | 	Direction          *SortDirection `json:"dir,omitempty"` | ||||||
|  | 	DirectionSecondary *SortDirection `json:"dir2,omitempty"` | ||||||
|  | 	PageSize           *int           `json:"size,omitempty"` | ||||||
|  |  | ||||||
|  | 	ExtraTimestamp *time.Time `json:"ts,omitempty"` | ||||||
|  | 	ExtraId        *string    `json:"id,omitempty"` | ||||||
|  | 	ExtraPage      *int       `json:"pg,omitempty"` | ||||||
|  | 	ExtraPageSize  *int       `json:"sz,omitempty"` | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewKeySortToken(valuePrimary string, valueSecondary string, direction SortDirection, directionSecondary SortDirection, pageSize int, extra Extra) CursorToken { | ||||||
|  | 	return CTKeySort{ | ||||||
|  | 		Mode:               CTMNormal, | ||||||
|  | 		ValuePrimary:       valuePrimary, | ||||||
|  | 		ValueSecondary:     valueSecondary, | ||||||
|  | 		Direction:          direction, | ||||||
|  | 		DirectionSecondary: directionSecondary, | ||||||
|  | 		PageSize:           pageSize, | ||||||
|  | 		Extra:              extra, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func Start() CursorToken { | ||||||
|  | 	return CTKeySort{ | ||||||
|  | 		Mode:               CTMStart, | ||||||
|  | 		ValuePrimary:       "", | ||||||
|  | 		ValueSecondary:     "", | ||||||
|  | 		Direction:          "", | ||||||
|  | 		DirectionSecondary: "", | ||||||
|  | 		PageSize:           0, | ||||||
|  | 		Extra:              Extra{}, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func End() CursorToken { | ||||||
|  | 	return CTKeySort{ | ||||||
|  | 		Mode:               CTMEnd, | ||||||
|  | 		ValuePrimary:       "", | ||||||
|  | 		ValueSecondary:     "", | ||||||
|  | 		Direction:          "", | ||||||
|  | 		DirectionSecondary: "", | ||||||
|  | 		PageSize:           0, | ||||||
|  | 		Extra:              Extra{}, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c CTKeySort) Token() string { | ||||||
|  |  | ||||||
|  | 	if c.Mode == CTMStart { | ||||||
|  | 		return "@start" | ||||||
|  | 	} | ||||||
|  | 	if c.Mode == CTMEnd { | ||||||
|  | 		return "@end" | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	// We kinda manually implement omitempty for the CursorToken here | ||||||
|  | 	// because omitempty does not work for time.Time and otherwise we would always | ||||||
|  | 	// get weird time values when decoding a token that initially didn't have an Timestamp set | ||||||
|  | 	// For this usecase we treat Unix=0 as an empty timestamp | ||||||
|  |  | ||||||
|  | 	sertok := cursorTokenKeySortSerialize{} | ||||||
|  |  | ||||||
|  | 	if c.ValuePrimary != "" { | ||||||
|  | 		sertok.ValuePrimary = &c.ValuePrimary | ||||||
|  | 	} | ||||||
|  | 	if c.ValueSecondary != "" { | ||||||
|  | 		sertok.ValueSecondary = &c.ValueSecondary | ||||||
|  | 	} | ||||||
|  | 	if c.Direction != "" { | ||||||
|  | 		sertok.Direction = &c.Direction | ||||||
|  | 	} | ||||||
|  | 	if c.DirectionSecondary != "" { | ||||||
|  | 		sertok.DirectionSecondary = &c.DirectionSecondary | ||||||
|  | 	} | ||||||
|  | 	if c.PageSize != 0 { | ||||||
|  | 		sertok.PageSize = &c.PageSize | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	sertok.ExtraTimestamp = c.Extra.Timestamp | ||||||
|  | 	sertok.ExtraId = c.Extra.Id | ||||||
|  | 	sertok.ExtraPage = c.Extra.Page | ||||||
|  | 	sertok.ExtraPageSize = c.Extra.PageSize | ||||||
|  |  | ||||||
|  | 	body, err := json.Marshal(sertok) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return "tok_" + base32.StdEncoding.EncodeToString(body) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c CTKeySort) valuePrimaryObjectId() (primitive.ObjectID, bool) { | ||||||
|  | 	if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil { | ||||||
|  | 		return oid, true | ||||||
|  | 	} else { | ||||||
|  | 		return primitive.ObjectID{}, false | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c CTKeySort) valueSecondaryObjectId() (primitive.ObjectID, bool) { | ||||||
|  | 	if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil { | ||||||
|  | 		return oid, true | ||||||
|  | 	} else { | ||||||
|  | 		return primitive.ObjectID{}, false | ||||||
|  | 	} | ||||||
|  | } | ||||||
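A short sketch of building and round-tripping the renamed key-sort token through the new constructor; the field values are illustrative and the import alias matches the sketch above.

```go
package main

import (
	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" // assumed import path
)

func main() {
	// "Continue after this primary/secondary value" cursor, 50 entries per page.
	tok := ct.NewKeySortToken(
		"2024-10-22T09:00:00Z",     // valuePrimary (illustrative)
		"635c27780000000000000000", // valueSecondary (illustrative)
		ct.SortASC, ct.SortASC,     // primary and secondary sort direction
		50,                         // page size
		ct.Extra{},
	)

	serialized := tok.Token() // "tok_" + base32(JSON body), as implemented above

	back, err := ct.Decode(serialized)
	if err != nil {
		panic(err)
	}
	_ = back.(ct.CTKeySort) // "tok_" prefixes decode to the concrete CTKeySort
}
```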
							
								
								
									
cursortoken/tokenPaginate.go (new file, 33 lines)
							| @@ -0,0 +1,33 @@ | |||||||
|  | package cursortoken | ||||||
|  |  | ||||||
|  | import "strconv" | ||||||
|  |  | ||||||
|  | type CTPaginated struct { | ||||||
|  | 	Mode Mode | ||||||
|  | 	Page int | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func Page(p int) CursorToken { | ||||||
|  | 	return CTPaginated{ | ||||||
|  | 		Mode: CTMNormal, | ||||||
|  | 		Page: p, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func PageEnd() CursorToken { | ||||||
|  | 	return CTPaginated{ | ||||||
|  | 		Mode: CTMEnd, | ||||||
|  | 		Page: 0, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c CTPaginated) Token() string { | ||||||
|  | 	if c.Mode == CTMStart { | ||||||
|  | 		return "$1" | ||||||
|  | 	} | ||||||
|  | 	if c.Mode == CTMEnd { | ||||||
|  | 		return "$end" | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return "$" + strconv.Itoa(c.Page) | ||||||
|  | } | ||||||
| @@ -450,7 +450,7 @@ func (b *Builder) Output(ctx context.Context, g *gin.Context) { | |||||||
|  |  | ||||||
| // Print prints the error | // Print prints the error | ||||||
| // If the error is SevErr we also send it to the error-service | // If the error is SevErr we also send it to the error-service | ||||||
| func (b *Builder) Print(ctxs ...context.Context) { | func (b *Builder) Print(ctxs ...context.Context) Proxy { | ||||||
| 	warnOnPkgConfigNotInitialized() | 	warnOnPkgConfigNotInitialized() | ||||||
|  |  | ||||||
| 	for _, dctx := range ctxs { | 	for _, dctx := range ctxs { | ||||||
| @@ -468,6 +468,8 @@ func (b *Builder) Print(ctxs ...context.Context) { | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	b.errorData.CallListener(MethodPrint) | 	b.errorData.CallListener(MethodPrint) | ||||||
|  |  | ||||||
|  | 	return Proxy{v: *b.errorData} // we return Proxy<Exerr> here instead of Exerr to prevent warnings on ignored err-returns | ||||||
| } | } | ||||||
|  |  | ||||||
| func (b *Builder) Format(level LogPrintLevel) string { | func (b *Builder) Format(level LogPrintLevel) string { | ||||||
|   | |||||||
| @@ -19,6 +19,52 @@ func FromError(err error) *ExErr { | |||||||
| 		return verr | 		return verr | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|  | 	//goland:noinspection GoTypeAssertionOnErrors | ||||||
|  | 	if verr, ok := err.(langext.PanicWrappedErr); ok { | ||||||
|  | 		return &ExErr{ | ||||||
|  | 			UniqueID:       newID(), | ||||||
|  | 			Category:       CatForeign, | ||||||
|  | 			Type:           TypePanic, | ||||||
|  | 			Severity:       SevErr, | ||||||
|  | 			Timestamp:      time.Time{}, | ||||||
|  | 			StatusCode:     nil, | ||||||
|  | 			Message:        "A panic occured", | ||||||
|  | 			WrappedErrType: fmt.Sprintf("%T", verr), | ||||||
|  | 			WrappedErr:     err, | ||||||
|  | 			Caller:         "", | ||||||
|  | 			OriginalError:  nil, | ||||||
|  | 			Meta: MetaMap{ | ||||||
|  | 				"panic_object": {DataType: MDTString, Value: fmt.Sprintf("%+v", verr.RecoveredObj())}, | ||||||
|  | 				"panic_type":   {DataType: MDTString, Value: fmt.Sprintf("%T", verr.RecoveredObj())}, | ||||||
|  | 				"stack":        {DataType: MDTString, Value: verr.Stack}, | ||||||
|  | 			}, | ||||||
|  | 			Extra: make(map[string]any), | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	//goland:noinspection GoTypeAssertionOnErrors | ||||||
|  | 	if verr, ok := err.(*langext.PanicWrappedErr); ok && verr != nil { | ||||||
|  | 		return &ExErr{ | ||||||
|  | 			UniqueID:       newID(), | ||||||
|  | 			Category:       CatForeign, | ||||||
|  | 			Type:           TypePanic, | ||||||
|  | 			Severity:       SevErr, | ||||||
|  | 			Timestamp:      time.Time{}, | ||||||
|  | 			StatusCode:     nil, | ||||||
|  | 			Message:        "A panic occured", | ||||||
|  | 			WrappedErrType: fmt.Sprintf("%T", verr), | ||||||
|  | 			WrappedErr:     err, | ||||||
|  | 			Caller:         "", | ||||||
|  | 			OriginalError:  nil, | ||||||
|  | 			Meta: MetaMap{ | ||||||
|  | 				"panic_object": {DataType: MDTString, Value: fmt.Sprintf("%+v", verr.RecoveredObj())}, | ||||||
|  | 				"panic_type":   {DataType: MDTString, Value: fmt.Sprintf("%T", verr.RecoveredObj())}, | ||||||
|  | 				"stack":        {DataType: MDTString, Value: verr.Stack}, | ||||||
|  | 			}, | ||||||
|  | 			Extra: make(map[string]any), | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	// A foreign error (eg a MongoDB exception) | 	// A foreign error (eg a MongoDB exception) | ||||||
| 	return &ExErr{ | 	return &ExErr{ | ||||||
| 		UniqueID:       newID(), | 		UniqueID:       newID(), | ||||||
|   | |||||||
| @@ -48,6 +48,12 @@ func (ee *ExErr) toJson(depth int, applyExtendListener bool, outputMeta bool) la | |||||||
| 			metaJson[metaKey] = metaVal.rawValueForJson() | 			metaJson[metaKey] = metaVal.rawValueForJson() | ||||||
| 		} | 		} | ||||||
| 		ginJson["meta"] = metaJson | 		ginJson["meta"] = metaJson | ||||||
|  |  | ||||||
|  | 		extraJson := langext.H{} | ||||||
|  | 		for extraKey, extraVal := range ee.Extra { | ||||||
|  | 			extraJson[extraKey] = extraVal | ||||||
|  | 		} | ||||||
|  | 		ginJson["extra"] = extraJson | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if applyExtendListener { | 	if applyExtendListener { | ||||||
|   | |||||||
| @@ -111,3 +111,16 @@ func OriginalError(e error) error { | |||||||
|  |  | ||||||
| 	return bmerr | 	return bmerr | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func UniqueID(v error) *string { | ||||||
|  | 	if v == nil { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	//goland:noinspection GoTypeAssertionOnErrors | ||||||
|  | 	if verr, ok := v.(*ExErr); ok { | ||||||
|  | 		return &verr.UniqueID | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|   | |||||||
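For illustration, a hedged sketch of the new exerr.UniqueID helper; the handler framing is assumed, only the exerr calls appear in this diff.

```go
package main

import (
	"log"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

// logErrID logs the exerr unique ID if the error is an *ExErr (as produced by
// the package's builders) and stays silent for plain errors.
func logErrID(err error) {
	if id := exerr.UniqueID(err); id != nil {
		log.Printf("operation failed (error-id: %s)", *id)
	}
}

func main() {
	err := exerr.New(exerr.TypeCursorTokenDecode, "example failure").Build()
	logErrID(err)
}
```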
							
								
								
									
exerr/proxy.go (new file, 13 lines)
							| @@ -0,0 +1,13 @@ | |||||||
|  | package exerr | ||||||
|  |  | ||||||
|  | type Proxy struct { | ||||||
|  | 	v ExErr | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (p *Proxy) UniqueID() string { | ||||||
|  | 	return p.v.UniqueID | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (p *Proxy) Get() ExErr { | ||||||
|  | 	return p.v | ||||||
|  | } | ||||||
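Because Print now returns a Proxy (see the Builder change above), callers can keep the fire-and-forget logging style and still surface the generated unique ID. A minimal sketch, with the failing call being illustrative:

```go
package main

import (
	"errors"
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

func main() {
	err := errors.New("db connection refused") // illustrative failure

	// Print() still logs (and forwards SevErr errors to the error-service),
	// but the returned Proxy exposes the unique ID of the printed error.
	p := exerr.Wrap(err, "loading user failed").Print()

	fmt.Println("reference id:", p.UniqueID())
}
```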
							
								
								
									
go.mod (8 changed lines)
							| @@ -22,11 +22,11 @@ require ( | |||||||
|  |  | ||||||
| require ( | require ( | ||||||
| 	github.com/bytedance/sonic v1.12.3 // indirect | 	github.com/bytedance/sonic v1.12.3 // indirect | ||||||
| 	github.com/bytedance/sonic/loader v0.2.0 // indirect | 	github.com/bytedance/sonic/loader v0.2.1 // indirect | ||||||
| 	github.com/cloudwego/base64x v0.1.4 // indirect | 	github.com/cloudwego/base64x v0.1.4 // indirect | ||||||
| 	github.com/cloudwego/iasm v0.2.0 // indirect | 	github.com/cloudwego/iasm v0.2.0 // indirect | ||||||
| 	github.com/dustin/go-humanize v1.0.1 // indirect | 	github.com/dustin/go-humanize v1.0.1 // indirect | ||||||
| 	github.com/gabriel-vasile/mimetype v1.4.5 // indirect | 	github.com/gabriel-vasile/mimetype v1.4.6 // indirect | ||||||
| 	github.com/gin-contrib/sse v0.1.0 // indirect | 	github.com/gin-contrib/sse v0.1.0 // indirect | ||||||
| 	github.com/go-playground/locales v0.14.1 // indirect | 	github.com/go-playground/locales v0.14.1 // indirect | ||||||
| 	github.com/go-playground/universal-translator v0.18.1 // indirect | 	github.com/go-playground/universal-translator v0.18.1 // indirect | ||||||
| @@ -35,7 +35,7 @@ require ( | |||||||
| 	github.com/golang/snappy v0.0.4 // indirect | 	github.com/golang/snappy v0.0.4 // indirect | ||||||
| 	github.com/google/uuid v1.5.0 // indirect | 	github.com/google/uuid v1.5.0 // indirect | ||||||
| 	github.com/json-iterator/go v1.1.12 // indirect | 	github.com/json-iterator/go v1.1.12 // indirect | ||||||
| 	github.com/klauspost/compress v1.17.10 // indirect | 	github.com/klauspost/compress v1.17.11 // indirect | ||||||
| 	github.com/klauspost/cpuid/v2 v2.2.8 // indirect | 	github.com/klauspost/cpuid/v2 v2.2.8 // indirect | ||||||
| 	github.com/leodido/go-urn v1.4.0 // indirect | 	github.com/leodido/go-urn v1.4.0 // indirect | ||||||
| 	github.com/mattn/go-colorable v0.1.13 // indirect | 	github.com/mattn/go-colorable v0.1.13 // indirect | ||||||
| @@ -55,7 +55,7 @@ require ( | |||||||
| 	golang.org/x/image v0.21.0 // indirect | 	golang.org/x/image v0.21.0 // indirect | ||||||
| 	golang.org/x/net v0.30.0 // indirect | 	golang.org/x/net v0.30.0 // indirect | ||||||
| 	golang.org/x/text v0.19.0 // indirect | 	golang.org/x/text v0.19.0 // indirect | ||||||
| 	google.golang.org/protobuf v1.34.2 // indirect | 	google.golang.org/protobuf v1.35.1 // indirect | ||||||
| 	gopkg.in/yaml.v3 v3.0.1 // indirect | 	gopkg.in/yaml.v3 v3.0.1 // indirect | ||||||
| 	modernc.org/libc v1.37.6 // indirect | 	modernc.org/libc v1.37.6 // indirect | ||||||
| 	modernc.org/mathutil v1.6.0 // indirect | 	modernc.org/mathutil v1.6.0 // indirect | ||||||
|   | |||||||
							
								
								
									
go.sum (8 changed lines)
							| @@ -6,6 +6,8 @@ github.com/bytedance/sonic v1.12.3/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKz | |||||||
| github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= | github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= | ||||||
| github.com/bytedance/sonic/loader v0.2.0 h1:zNprn+lsIP06C/IqCHs3gPQIvnvpKbbxyXQP1iU4kWM= | github.com/bytedance/sonic/loader v0.2.0 h1:zNprn+lsIP06C/IqCHs3gPQIvnvpKbbxyXQP1iU4kWM= | ||||||
| github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= | github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= | ||||||
|  | github.com/bytedance/sonic/loader v0.2.1 h1:1GgorWTqf12TA8mma4DDSbaQigE2wOgQo7iCjjJv3+E= | ||||||
|  | github.com/bytedance/sonic/loader v0.2.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= | ||||||
| github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y= | github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y= | ||||||
| github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= | github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= | ||||||
| github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg= | github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg= | ||||||
| @@ -20,6 +22,8 @@ github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkp | |||||||
| github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= | github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= | ||||||
| github.com/gabriel-vasile/mimetype v1.4.5 h1:J7wGKdGu33ocBOhGy0z653k/lFKLFDPJMG8Gql0kxn4= | github.com/gabriel-vasile/mimetype v1.4.5 h1:J7wGKdGu33ocBOhGy0z653k/lFKLFDPJMG8Gql0kxn4= | ||||||
| github.com/gabriel-vasile/mimetype v1.4.5/go.mod h1:ibHel+/kbxn9x2407k1izTA1S81ku1z/DlgOW2QE0M4= | github.com/gabriel-vasile/mimetype v1.4.5/go.mod h1:ibHel+/kbxn9x2407k1izTA1S81ku1z/DlgOW2QE0M4= | ||||||
|  | github.com/gabriel-vasile/mimetype v1.4.6 h1:3+PzJTKLkvgjeTbts6msPJt4DixhT4YtFNf1gtGe3zc= | ||||||
|  | github.com/gabriel-vasile/mimetype v1.4.6/go.mod h1:JX1qVKqZd40hUPpAfiNTe0Sne7hdfKSbOqqmkq8GCXc= | ||||||
| github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= | github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= | ||||||
| github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= | github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= | ||||||
| github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU= | github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU= | ||||||
| @@ -57,6 +61,8 @@ github.com/jung-kurt/gofpdf v1.16.2 h1:jgbatWHfRlPYiK85qgevsZTHviWXKwB1TTiKdz5Pt | |||||||
| github.com/jung-kurt/gofpdf v1.16.2/go.mod h1:1hl7y57EsiPAkLbOwzpzqgx1A30nQCk/YmFV8S2vmK0= | github.com/jung-kurt/gofpdf v1.16.2/go.mod h1:1hl7y57EsiPAkLbOwzpzqgx1A30nQCk/YmFV8S2vmK0= | ||||||
| github.com/klauspost/compress v1.17.10 h1:oXAz+Vh0PMUvJczoi+flxpnBEPxoER1IaAnU/NMPtT0= | github.com/klauspost/compress v1.17.10 h1:oXAz+Vh0PMUvJczoi+flxpnBEPxoER1IaAnU/NMPtT0= | ||||||
| github.com/klauspost/compress v1.17.10/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= | github.com/klauspost/compress v1.17.10/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= | ||||||
|  | github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= | ||||||
|  | github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= | ||||||
| github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM= | github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||||
| @@ -168,6 +174,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc | |||||||
| golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | ||||||
| google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= | google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= | ||||||
| google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= | google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= | ||||||
|  | google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA= | ||||||
|  | google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= | ||||||
| gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= | ||||||
| gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | ||||||
| gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| package goext | package goext | ||||||
|  |  | ||||||
| const GoextVersion = "0.0.526" | const GoextVersion = "0.0.536" | ||||||
|  |  | ||||||
| const GoextVersionTimestamp = "2024-10-05T23:59:23+0200" | const GoextVersionTimestamp = "2024-10-22T09:57:06+0200" | ||||||
|   | |||||||
| @@ -11,7 +11,7 @@ func (p PanicWrappedErr) Error() string { | |||||||
| 	return "A panic occured" | 	return "A panic occured" | ||||||
| } | } | ||||||
|  |  | ||||||
| func (p PanicWrappedErr) ReoveredObj() any { | func (p PanicWrappedErr) RecoveredObj() any { | ||||||
| 	return p.panic | 	return p.panic | ||||||
| } | } | ||||||
|  |  | ||||||
|   | |||||||
| @@ -120,25 +120,25 @@ func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirecti | |||||||
|  |  | ||||||
| 	valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary) | 	valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return ct.CursorToken{}, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build() | 		return nil, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	valueSeconary := "" | 	valueSeconary := "" | ||||||
| 	if fieldSecondary != nil && dirSecondary != nil { | 	if fieldSecondary != nil && dirSecondary != nil { | ||||||
| 		valueSeconary, err = c.getFieldValueAsTokenString(lastEntity, *fieldSecondary) | 		valueSeconary, err = c.getFieldValueAsTokenString(lastEntity, *fieldSecondary) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return ct.CursorToken{}, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build() | 			return nil, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return ct.CursorToken{ | 	return ct.NewKeySortToken( | ||||||
| 		Mode:           ct.CTMNormal, | 		valuePrimary, | ||||||
| 		ValuePrimary:   valuePrimary, | 		valueSeconary, | ||||||
| 		ValueSecondary: valueSeconary, | 		dirPrimary, | ||||||
| 		Direction:      dirPrimary, | 		dirPrimary, | ||||||
| 		PageSize:       langext.Coalesce(pageSize, 0), | 		langext.Coalesce(pageSize, 0), | ||||||
| 		Extra:          ct.Extra{}, | 		ct.Extra{}, | ||||||
| 	}, nil | 	), nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) needsDoubleSort(ctx context.Context) bool { | func (c *Coll[TData]) needsDoubleSort(ctx context.Context) bool { | ||||||
|   | |||||||
							
								
								
									
wmo/queryList.go (122 changed lines)
							| @@ -10,6 +10,24 @@ import ( | |||||||
| ) | ) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) { | func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) { | ||||||
|  | 	if ctks, ok := inTok.(ct.CTKeySort); ok { | ||||||
|  | 		d, tok, err := c.listWithKSToken(ctx, filter, pageSize, ctks) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, ct.End(), err | ||||||
|  | 		} | ||||||
|  | 		return d, tok, nil | ||||||
|  | 	} else if ctks, ok := inTok.(ct.CTPaginated); ok { | ||||||
|  | 		d, tok, err := c.listWithPaginatedToken(ctx, filter, pageSize, ctks) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, ct.End(), err | ||||||
|  | 		} | ||||||
|  | 		return d, tok, nil | ||||||
|  | 	} else { | ||||||
|  | 		return nil, ct.End(), exerr.New(exerr.TypeCursorTokenDecode, "unknown ct type").Any("token", inTok).Type("tokenType", inTok).Build() | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) listWithKSToken(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTKeySort) ([]TData, ct.CursorToken, error) { | ||||||
| 	if inTok.Mode == ct.CTMEnd { | 	if inTok.Mode == ct.CTMEnd { | ||||||
| 		return make([]TData, 0), ct.End(), nil | 		return make([]TData, 0), ct.End(), nil | ||||||
| 	} | 	} | ||||||
| @@ -41,7 +59,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
|  |  | ||||||
| 	paginationPipeline, doubleSortPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | 	paginationPipeline, doubleSortPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, ct.CursorToken{}, exerr. | 		return nil, nil, exerr. | ||||||
| 			Wrap(err, "failed to create pagination"). | 			Wrap(err, "failed to create pagination"). | ||||||
| 			WithType(exerr.TypeCursorTokenDecode). | 			WithType(exerr.TypeCursorTokenDecode). | ||||||
| 			Str("collection", c.Name()). | 			Str("collection", c.Name()). | ||||||
| @@ -66,7 +84,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline) | 	cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, ct.CursorToken{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | 		return nil, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	defer func() { _ = cursor.Close(ctx) }() | 	defer func() { _ = cursor.Close(ctx) }() | ||||||
| @@ -75,7 +93,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
| 	if pageSize == nil { | 	if pageSize == nil { | ||||||
| 		entries, err := c.decodeAll(ctx, cursor) | 		entries, err := c.decodeAll(ctx, cursor) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to all-decode entities").Build() | 			return nil, nil, exerr.Wrap(err, "failed to all-decode entities").Build() | ||||||
| 		} | 		} | ||||||
| 		return entries, ct.End(), nil | 		return entries, ct.End(), nil | ||||||
| 	} | 	} | ||||||
| @@ -85,7 +103,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
| 		var entry TData | 		var entry TData | ||||||
| 		entry, err = c.decodeSingle(ctx, cursor) | 		entry, err = c.decodeSingle(ctx, cursor) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to decode entity").Build() | 			return nil, nil, exerr.Wrap(err, "failed to decode entity").Build() | ||||||
| 		} | 		} | ||||||
| 		entities = append(entities, entry) | 		entities = append(entities, entry) | ||||||
| 	} | 	} | ||||||
| @@ -100,12 +118,70 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
|  |  | ||||||
| 	nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize) | 	nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to create (out)-token").Build() | 		return nil, nil, exerr.Wrap(err, "failed to create (out)-token").Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	return entities, nextToken, nil | 	return entities, nextToken, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) listWithPaginatedToken(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTPaginated) ([]TData, ct.CursorToken, error) { | ||||||
|  | 	var err error | ||||||
|  |  | ||||||
|  | 	page := inTok.Page | ||||||
|  |  | ||||||
|  | 	if page < 0 { | ||||||
|  | 		page = 1 | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelineSort := mongo.Pipeline{} | ||||||
|  | 	pipelineFilter := mongo.Pipeline{} | ||||||
|  |  | ||||||
|  | 	if filter != nil { | ||||||
|  | 		pipelineFilter = filter.FilterQuery(ctx) | ||||||
|  | 		pf1, pd1, pf2, pd2 := filter.Pagination(ctx) | ||||||
|  |  | ||||||
|  | 		pipelineSort, err = createSortOnlyPipeline(pf1, pd1, &pf2, &pd2) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, nil, exerr.Wrap(err, "failed to create sort pipeline").Build() | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelinePaginate := mongo.Pipeline{} | ||||||
|  | 	if pageSize != nil { | ||||||
|  | 		pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$skip", Value: *pageSize * (page - 1)}}) | ||||||
|  | 		pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$limit", Value: *pageSize}}) | ||||||
|  | 	} else { | ||||||
|  | 		page = 1 | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelineCount := mongo.Pipeline{} | ||||||
|  | 	pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}}) | ||||||
|  |  | ||||||
|  | 	extrModPipelineResolved := mongo.Pipeline{} | ||||||
|  | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelineList := langext.ArrConcat(pipelineFilter, pipelineSort, pipelinePaginate, extrModPipelineResolved, pipelineSort) | ||||||
|  |  | ||||||
|  | 	cursorList, err := c.coll.Aggregate(ctx, pipelineList) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	entities, err := c.decodeAll(ctx, cursorList) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, nil, exerr.Wrap(err, "failed to all-decode entities").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	tokOut := ct.Page(page + 1) | ||||||
|  | 	if pageSize == nil || len(entities) < *pageSize { | ||||||
|  | 		tokOut = ct.PageEnd() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return entities, tokOut, nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, error) { | func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, error) { | ||||||
| 	type countRes struct { | 	type countRes struct { | ||||||
| 		Count int64 `bson:"c"` | 		Count int64 `bson:"c"` | ||||||
| @@ -138,12 +214,12 @@ func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageS | |||||||
| 	// NOTE: Possible optimization: Cache count in CursorToken, then fetch count only on first page. | 	// NOTE: Possible optimization: Cache count in CursorToken, then fetch count only on first page. | ||||||
| 	count, err := c.Count(ctx, filter) | 	count, err := c.Count(ctx, filter) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, ct.CursorToken{}, 0, err | 		return nil, nil, 0, err | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	data, token, err := c.List(ctx, filter, pageSize, inTok) | 	data, token, err := c.List(ctx, filter, pageSize, inTok) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, ct.CursorToken{}, 0, err | 		return nil, nil, 0, err | ||||||
| 	} | 	} | ||||||
| 	return data, token, count, nil | 	return data, token, count, nil | ||||||
| } | } | ||||||
| @@ -184,7 +260,7 @@ func (c *Coll[TData]) ListAllIDs(ctx context.Context, filter ct.RawFilter) ([]st | |||||||
| 	return langext.ArrMap(res, func(v idObject) string { return v.ID }), nil | 	return langext.ArrMap(res, func(v idObject) string { return v.ID }), nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) { | func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CTKeySort, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) { | ||||||
|  |  | ||||||
| 	cond := bson.A{} | 	cond := bson.A{} | ||||||
| 	sort := bson.D{} | 	sort := bson.D{} | ||||||
| @@ -265,3 +341,33 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken | |||||||
|  |  | ||||||
| 	return pipeline, pipelineSort, nil | 	return pipeline, pipelineSort, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func createSortOnlyPipeline(fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection) ([]bson.D, error) { | ||||||
|  |  | ||||||
|  | 	sort := bson.D{} | ||||||
|  |  | ||||||
|  | 	if sortPrimary == ct.SortASC { | ||||||
|  | 		// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary | ||||||
|  | 		sort = append(sort, bson.E{Key: fieldPrimary, Value: +1}) | ||||||
|  | 	} else if sortPrimary == ct.SortDESC { | ||||||
|  | 		// We sort DESC on <field> - so we want all entries older ($lt) than the $primary | ||||||
|  | 		sort = append(sort, bson.E{Key: fieldPrimary, Value: -1}) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary { | ||||||
|  |  | ||||||
|  | 		if *sortSecondary == ct.SortASC { | ||||||
|  |  | ||||||
|  | 			sort = append(sort, bson.E{Key: *fieldSecondary, Value: +1}) | ||||||
|  |  | ||||||
|  | 		} else if *sortSecondary == ct.SortDESC { | ||||||
|  |  | ||||||
|  | 			sort = append(sort, bson.E{Key: *fieldSecondary, Value: -1}) | ||||||
|  |  | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelineSort := mongo.Pipeline{bson.D{{Key: "$sort", Value: sort}}} | ||||||
|  |  | ||||||
|  | 	return pipelineSort, nil | ||||||
|  | } | ||||||
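To tie the wmo changes together, a hedged sketch of paging through a collection with the new "$<n>" tokens. The Coll construction, the concrete Filter, and the cursortoken/wmo import paths are assumptions; the List signature and the token helpers are from this diff.

```go
package pagingexample

import (
	"context"

	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" // assumed import path
	"gogs.mikescher.com/BlackForestBytes/goext/wmo"            // assumed import path
)

type User struct {
	ID   string `bson:"_id"`
	Name string `bson:"name"`
}

// listAllPaged walks coll 50 entries at a time, starting at page 1 and stopping
// once List hands back the "$end" token.
func listAllPaged(ctx context.Context, coll *wmo.Coll[User], filter ct.Filter) ([]User, error) {
	pageSize := 50
	all := make([]User, 0)

	tok := ct.Page(1)
	for {
		entries, next, err := coll.List(ctx, filter, &pageSize, tok)
		if err != nil {
			return nil, err
		}
		all = append(all, entries...)

		if next.Token() == ct.PageEnd().Token() { // "$end" => no further pages
			return all, nil
		}
		tok = next
	}
}
```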
|   | |||||||