Compare commits

...

11 Commits

SHA1 | Message | Date | CI (Build Docker and Deploy / Run goext test-suite (push))
9b2028ab54 | v0.0.541 | 2024-11-05 14:38:42 +01:00 | successful in 3m3s
207fd331d5 | v0.0.540 handle ct=nil same as ct=Start | 2024-10-28 14:35:05 +01:00 | successful in 2m25s
54b0d6701d | v0.0.539 | 2024-10-27 02:21:35 +02:00 | successful in 2m49s
fc2657179b | v0.0.538 | 2024-10-27 02:18:45 +02:00 | successful in 2m7s
d4894e31fe | Merge branch 'cursortoken-paginated' | 2024-10-27 02:18:25 +02:00 | cancelled
0ddfaf666b | v0.0.537 fix goext error print always showing error-type of highest-level error | 2024-10-26 23:38:13 +02:00 | successful in 2m12s
e154137105 | Trying out paginated cursortoken variant [UNTESTED] | 2024-10-25 09:45:42 +02:00 | successful in 2m11s
9b9a79b4ad | v0.0.536 revert bfcodegen changes | 2024-10-22 09:57:06 +02:00 | successful in 2m24s
5a8d7110e4 | v0.0.535 | 2024-10-22 09:41:59 +02:00 | successful in 2m52s
d47c84cd47 | v0.0.534 | 2024-10-22 09:40:53 +02:00 | cancelled
c571f3f888 | v0.0.533 Made String() functions in bfcodegen nil-ptr safe | 2024-10-22 09:36:40 +02:00 | successful in 2m9s
9 changed files with 366 additions and 163 deletions


@@ -3,12 +3,17 @@ package cursortoken
import (
"encoding/base32"
"encoding/json"
- "go.mongodb.org/mongo-driver/bson/primitive"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
+ "strconv"
"strings"
"time"
)
+ type CursorToken interface {
+ Token() string
+ IsEnd() bool
+ }
type Mode string
const (
@@ -24,97 +29,6 @@ type Extra struct {
PageSize *int
}
type CursorToken struct {
Mode Mode
ValuePrimary string
ValueSecondary string
Direction SortDirection
DirectionSecondary SortDirection
PageSize int
Extra Extra
}
type cursorTokenSerialize struct {
ValuePrimary *string `json:"v1,omitempty"`
ValueSecondary *string `json:"v2,omitempty"`
Direction *SortDirection `json:"dir,omitempty"`
DirectionSecondary *SortDirection `json:"dir2,omitempty"`
PageSize *int `json:"size,omitempty"`
ExtraTimestamp *time.Time `json:"ts,omitempty"`
ExtraId *string `json:"id,omitempty"`
ExtraPage *int `json:"pg,omitempty"`
ExtraPageSize *int `json:"sz,omitempty"`
}
func Start() CursorToken {
return CursorToken{
Mode: CTMStart,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func End() CursorToken {
return CursorToken{
Mode: CTMEnd,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func (c *CursorToken) Token() string {
if c.Mode == CTMStart {
return "@start"
}
if c.Mode == CTMEnd {
return "@end"
}
// We kinda manually implement omitempty for the CursorToken here
// because omitempty does not work for time.Time and otherwise we would always
// get weird time values when decoding a token that initially didn't have an Timestamp set
// For this usecase we treat Unix=0 as an empty timestamp
sertok := cursorTokenSerialize{}
if c.ValuePrimary != "" {
sertok.ValuePrimary = &c.ValuePrimary
}
if c.ValueSecondary != "" {
sertok.ValueSecondary = &c.ValueSecondary
}
if c.Direction != "" {
sertok.Direction = &c.Direction
}
if c.DirectionSecondary != "" {
sertok.DirectionSecondary = &c.DirectionSecondary
}
if c.PageSize != 0 {
sertok.PageSize = &c.PageSize
}
sertok.ExtraTimestamp = c.Extra.Timestamp
sertok.ExtraId = c.Extra.Id
sertok.ExtraPage = c.Extra.Page
sertok.ExtraPageSize = c.Extra.PageSize
body, err := json.Marshal(sertok)
if err != nil {
panic(err)
}
return "tok_" + base32.StdEncoding.EncodeToString(body)
}
func Decode(tok string) (CursorToken, error) {
if tok == "" {
return Start(), nil
@@ -125,60 +39,56 @@ func Decode(tok string) (CursorToken, error) {
if strings.ToLower(tok) == "@end" { if strings.ToLower(tok) == "@end" {
return End(), nil return End(), nil
} }
if strings.ToLower(tok) == "$end" {
if !strings.HasPrefix(tok, "tok_") { return PageEnd(), nil
return CursorToken{}, exerr.New(exerr.TypeCursorTokenDecode, "could not decode token, missing prefix").Str("token", tok).Build() }
if strings.HasPrefix(tok, "$") && len(tok) > 1 {
n, err := strconv.ParseInt(tok[1:], 10, 64)
if err != nil {
return nil, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).WithType(exerr.TypeCursorTokenDecode).Build()
}
return Page(int(n)), nil
} }
body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):]) if strings.HasPrefix(tok, "tok_") {
if err != nil {
return CursorToken{}, err
}
var tokenDeserialize cursorTokenSerialize body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):])
err = json.Unmarshal(body, &tokenDeserialize) if err != nil {
if err != nil { return nil, err
return CursorToken{}, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).Build() }
}
token := CursorToken{Mode: CTMNormal} var tokenDeserialize cursorTokenKeySortSerialize
err = json.Unmarshal(body, &tokenDeserialize)
if err != nil {
return nil, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).WithType(exerr.TypeCursorTokenDecode).Build()
}
if tokenDeserialize.ValuePrimary != nil { token := CTKeySort{Mode: CTMNormal}
token.ValuePrimary = *tokenDeserialize.ValuePrimary
}
if tokenDeserialize.ValueSecondary != nil {
token.ValueSecondary = *tokenDeserialize.ValueSecondary
}
if tokenDeserialize.Direction != nil {
token.Direction = *tokenDeserialize.Direction
}
if tokenDeserialize.DirectionSecondary != nil {
token.DirectionSecondary = *tokenDeserialize.DirectionSecondary
}
if tokenDeserialize.PageSize != nil {
token.PageSize = *tokenDeserialize.PageSize
}
token.Extra.Timestamp = tokenDeserialize.ExtraTimestamp if tokenDeserialize.ValuePrimary != nil {
token.Extra.Id = tokenDeserialize.ExtraId token.ValuePrimary = *tokenDeserialize.ValuePrimary
token.Extra.Page = tokenDeserialize.ExtraPage }
token.Extra.PageSize = tokenDeserialize.ExtraPageSize if tokenDeserialize.ValueSecondary != nil {
token.ValueSecondary = *tokenDeserialize.ValueSecondary
}
if tokenDeserialize.Direction != nil {
token.Direction = *tokenDeserialize.Direction
}
if tokenDeserialize.DirectionSecondary != nil {
token.DirectionSecondary = *tokenDeserialize.DirectionSecondary
}
if tokenDeserialize.PageSize != nil {
token.PageSize = *tokenDeserialize.PageSize
}
return token, nil token.Extra.Timestamp = tokenDeserialize.ExtraTimestamp
} token.Extra.Id = tokenDeserialize.ExtraId
token.Extra.Page = tokenDeserialize.ExtraPage
token.Extra.PageSize = tokenDeserialize.ExtraPageSize
return token, nil
func (c *CursorToken) ValuePrimaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil {
return oid, true
} else { } else {
return primitive.ObjectID{}, false return nil, exerr.New(exerr.TypeCursorTokenDecode, "could not decode token, missing/unknown prefix").Str("token", tok).Build()
}
}
func (c *CursorToken) ValueSecondaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
} }
} }
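For orientation, a hedged usage sketch of the reworked Decode: token strings now fall into three families ("@start"/"@end", "$<page>"/"$end" for the new paginated variant, and base32-encoded "tok_..." payloads for key-sort tokens). The import path is inferred from the module path visible elsewhere in this diff; treat this as an illustration, not project documentation.

package main

import (
    "fmt"

    ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)

func main() {
    // An empty string (or "@start") decodes to the key-sort start token.
    t1, _ := ct.Decode("")
    fmt.Println(t1.Token(), t1.IsEnd()) // @start false

    // "$3" decodes to a paginated token pointing at page 3.
    t2, _ := ct.Decode("$3")
    fmt.Println(t2.Token(), t2.IsEnd()) // $3 false

    // "$end" marks the end of a paginated listing.
    t3, _ := ct.Decode("$end")
    fmt.Println(t3.IsEnd()) // true

    // Anything without a known prefix is rejected with TypeCursorTokenDecode.
    _, err := ct.Decode("garbage")
    fmt.Println(err != nil) // true
}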

cursortoken/tokenKeySort.go (new file, 132 lines)

@@ -0,0 +1,132 @@
package cursortoken
import (
"encoding/base32"
"encoding/json"
"go.mongodb.org/mongo-driver/bson/primitive"
"time"
)
type CTKeySort struct {
Mode Mode
ValuePrimary string
ValueSecondary string
Direction SortDirection
DirectionSecondary SortDirection
PageSize int
Extra Extra
}
type cursorTokenKeySortSerialize struct {
ValuePrimary *string `json:"v1,omitempty"`
ValueSecondary *string `json:"v2,omitempty"`
Direction *SortDirection `json:"dir,omitempty"`
DirectionSecondary *SortDirection `json:"dir2,omitempty"`
PageSize *int `json:"size,omitempty"`
ExtraTimestamp *time.Time `json:"ts,omitempty"`
ExtraId *string `json:"id,omitempty"`
ExtraPage *int `json:"pg,omitempty"`
ExtraPageSize *int `json:"sz,omitempty"`
}
func NewKeySortToken(valuePrimary string, valueSecondary string, direction SortDirection, directionSecondary SortDirection, pageSize int, extra Extra) CursorToken {
return CTKeySort{
Mode: CTMNormal,
ValuePrimary: valuePrimary,
ValueSecondary: valueSecondary,
Direction: direction,
DirectionSecondary: directionSecondary,
PageSize: pageSize,
Extra: extra,
}
}
func Start() CursorToken {
return CTKeySort{
Mode: CTMStart,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func End() CursorToken {
return CTKeySort{
Mode: CTMEnd,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func (c CTKeySort) Token() string {
if c.Mode == CTMStart {
return "@start"
}
if c.Mode == CTMEnd {
return "@end"
}
// We kinda manually implement omitempty for the CursorToken here
// because omitempty does not work for time.Time and otherwise we would always
// get weird time values when decoding a token that initially didn't have an Timestamp set
// For this usecase we treat Unix=0 as an empty timestamp
sertok := cursorTokenKeySortSerialize{}
if c.ValuePrimary != "" {
sertok.ValuePrimary = &c.ValuePrimary
}
if c.ValueSecondary != "" {
sertok.ValueSecondary = &c.ValueSecondary
}
if c.Direction != "" {
sertok.Direction = &c.Direction
}
if c.DirectionSecondary != "" {
sertok.DirectionSecondary = &c.DirectionSecondary
}
if c.PageSize != 0 {
sertok.PageSize = &c.PageSize
}
sertok.ExtraTimestamp = c.Extra.Timestamp
sertok.ExtraId = c.Extra.Id
sertok.ExtraPage = c.Extra.Page
sertok.ExtraPageSize = c.Extra.PageSize
body, err := json.Marshal(sertok)
if err != nil {
panic(err)
}
return "tok_" + base32.StdEncoding.EncodeToString(body)
}
func (c CTKeySort) IsEnd() bool {
return c.Mode == CTMEnd
}
func (c CTKeySort) valuePrimaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
}
}
func (c CTKeySort) valueSecondaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
}
}
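The comment inside Token() above refers to a stdlib quirk worth spelling out: encoding/json's omitempty never omits a zero time.Time (a struct value is never considered "empty"), so a token without a timestamp would otherwise round-trip with a spurious 0001-01-01 value. A minimal, self-contained illustration of the pointer-based workaround that cursorTokenKeySortSerialize relies on:

package main

import (
    "encoding/json"
    "fmt"
    "time"
)

type withValue struct {
    TS time.Time `json:"ts,omitempty"` // omitempty has no effect on struct types
}

type withPointer struct {
    TS *time.Time `json:"ts,omitempty"` // a nil pointer really is omitted
}

func main() {
    a, _ := json.Marshal(withValue{})
    b, _ := json.Marshal(withPointer{})
    fmt.Println(string(a)) // {"ts":"0001-01-01T00:00:00Z"}
    fmt.Println(string(b)) // {}
}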


@@ -0,0 +1,37 @@
package cursortoken
import "strconv"
type CTPaginated struct {
Mode Mode
Page int
}
func Page(p int) CursorToken {
return CTPaginated{
Mode: CTMNormal,
Page: p,
}
}
func PageEnd() CursorToken {
return CTPaginated{
Mode: CTMEnd,
Page: 0,
}
}
func (c CTPaginated) Token() string {
if c.Mode == CTMStart {
return "$1"
}
if c.Mode == CTMEnd {
return "$end"
}
return "$" + strconv.Itoa(c.Page)
}
func (c CTPaginated) IsEnd() bool {
return c.Mode == CTMEnd
}
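A small consumer-side sketch of how the paginated token is meant to cycle ("$1", "$2", ... until "$end"). fetchPage is a hypothetical stand-in for an HTTP round-trip or a Coll.List call and is not part of goext:

package main

import (
    "fmt"

    ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)

// fetchPage pretends to return one page of data plus the token for the next page.
func fetchPage(tok ct.CursorToken) ([]string, ct.CursorToken) {
    if p, ok := tok.(ct.CTPaginated); ok && p.Page < 3 {
        return []string{fmt.Sprintf("item-from-page-%d", p.Page)}, ct.Page(p.Page + 1)
    }
    return nil, ct.PageEnd()
}

func main() {
    tok := ct.Page(1) // paginated listings start at page 1 ("$1")
    for !tok.IsEnd() {
        var items []string
        items, tok = fetchPage(tok)
        fmt.Println(items, "next:", tok.Token())
    }
}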


@@ -160,8 +160,8 @@ func (ee *ExErr) FormatLog(lvl LogPrintLevel) string {
for curr := ee; curr != nil; curr = curr.OriginalError {
indent += " "
- etype := ee.Type.Key
- if ee.Type == TypeWrap {
+ etype := curr.Type.Key
+ if curr.Type == TypeWrap {
etype = "~"
}
@@ -370,6 +370,14 @@ func (ee *ExErr) GetExtra(key string) (any, bool) {
return nil, false
}
func (ee *ExErr) UniqueIDs() []string {
ids := []string{ee.UniqueID}
for curr := ee; curr != nil; curr = curr.OriginalError {
ids = append(ids, curr.UniqueID)
}
return ids
}
// contains test if the supplied error is contained in this error (anywhere in the chain)
func (ee *ExErr) contains(original *ExErr) (*ExErr, bool) {
if original == nil {
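This hunk is the change behind the v0.0.537 commit message ("fix goext error print always showing error-type of highest-level error"): the loop walks the wrapped-error chain, but previously read the type from the outer ee instead of the loop variable curr, so every level printed the top-level type. A self-contained sketch of the pattern with a stand-in type (not exerr's real API):

package main

import "fmt"

// chainErr is a stand-in for exerr.ExErr, reduced to what the loop needs.
type chainErr struct {
    TypeKey  string
    Original *chainErr
}

func format(ee *chainErr) string {
    out := ""
    for curr := ee; curr != nil; curr = curr.Original {
        out += "[" + curr.TypeKey + "] " // the buggy variant read ee.TypeKey here
    }
    return out
}

func main() {
    e := &chainErr{TypeKey: "Wrap", Original: &chainErr{TypeKey: "CursorTokenDecode"}}
    fmt.Println(format(e)) // [Wrap] [CursorTokenDecode]
}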

go.mod

@@ -21,12 +21,12 @@ require (
)
require (
- github.com/bytedance/sonic v1.12.3 // indirect
+ github.com/bytedance/sonic v1.12.4 // indirect
- github.com/bytedance/sonic/loader v0.2.0 // indirect
+ github.com/bytedance/sonic/loader v0.2.1 // indirect
github.com/cloudwego/base64x v0.1.4 // indirect
github.com/cloudwego/iasm v0.2.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
- github.com/gabriel-vasile/mimetype v1.4.5 // indirect
+ github.com/gabriel-vasile/mimetype v1.4.6 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect

go.sum

@@ -3,9 +3,13 @@ filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4
github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
github.com/bytedance/sonic v1.12.3 h1:W2MGa7RCU1QTeYRTPE3+88mVC0yXmsRQRChiyVocVjU=
github.com/bytedance/sonic v1.12.3/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
+ github.com/bytedance/sonic v1.12.4 h1:9Csb3c9ZJhfUWeMtpCDCq6BUoH5ogfDFLUgQ/jG+R0k=
+ github.com/bytedance/sonic v1.12.4/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.2.0 h1:zNprn+lsIP06C/IqCHs3gPQIvnvpKbbxyXQP1iU4kWM=
github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
+ github.com/bytedance/sonic/loader v0.2.1 h1:1GgorWTqf12TA8mma4DDSbaQigE2wOgQo7iCjjJv3+E=
+ github.com/bytedance/sonic/loader v0.2.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
@@ -20,6 +24,8 @@ github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkp
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/gabriel-vasile/mimetype v1.4.5 h1:J7wGKdGu33ocBOhGy0z653k/lFKLFDPJMG8Gql0kxn4=
github.com/gabriel-vasile/mimetype v1.4.5/go.mod h1:ibHel+/kbxn9x2407k1izTA1S81ku1z/DlgOW2QE0M4=
+ github.com/gabriel-vasile/mimetype v1.4.6 h1:3+PzJTKLkvgjeTbts6msPJt4DixhT4YtFNf1gtGe3zc=
+ github.com/gabriel-vasile/mimetype v1.4.6/go.mod h1:JX1qVKqZd40hUPpAfiNTe0Sne7hdfKSbOqqmkq8GCXc=
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=


@@ -1,5 +1,5 @@
package goext
- const GoextVersion = "0.0.532"
+ const GoextVersion = "0.0.541"
- const GoextVersionTimestamp = "2024-10-13T16:33:39+0200"
+ const GoextVersionTimestamp = "2024-11-05T14:38:42+0100"


@@ -120,25 +120,25 @@ func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirecti
valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary)
if err != nil {
- return ct.CursorToken{}, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build()
+ return nil, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build()
}
valueSeconary := ""
if fieldSecondary != nil && dirSecondary != nil {
valueSeconary, err = c.getFieldValueAsTokenString(lastEntity, *fieldSecondary)
if err != nil {
- return ct.CursorToken{}, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build()
+ return nil, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build()
}
}
- return ct.CursorToken{
- Mode: ct.CTMNormal,
- ValuePrimary: valuePrimary,
- ValueSecondary: valueSeconary,
- Direction: dirPrimary,
- PageSize: langext.Coalesce(pageSize, 0),
- Extra: ct.Extra{},
- }, nil
+ return ct.NewKeySortToken(
+ valuePrimary,
+ valueSeconary,
+ dirPrimary,
+ dirPrimary,
+ langext.Coalesce(pageSize, 0),
+ ct.Extra{},
+ ), nil
}
func (c *Coll[TData]) needsDoubleSort(ctx context.Context) bool {
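createToken now goes through the NewKeySortToken constructor instead of assembling the struct literal itself. A hedged round-trip sketch; the argument order comes from tokenKeySort.go above, and the concrete values are made up:

package main

import (
    "fmt"
    "strings"

    ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)

func main() {
    // valuePrimary, valueSecondary, direction, directionSecondary, pageSize, extra
    tok := ct.NewKeySortToken("2024-10-27T02:18:45Z", "some-secondary-value", ct.SortDESC, ct.SortDESC, 50, ct.Extra{})

    // Token() JSON-encodes the non-empty fields and base32-encodes them behind a "tok_" prefix.
    s := tok.Token()
    fmt.Println(strings.HasPrefix(s, "tok_")) // true

    // Decode restores a CTKeySort in CTMNormal mode, so the token is not an end marker.
    back, err := ct.Decode(s)
    fmt.Println(err == nil, back.IsEnd()) // true false
}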


@@ -10,6 +10,28 @@ import (
)
func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) {
if inTok == nil {
inTok = ct.Start()
}
if ctks, ok := inTok.(ct.CTKeySort); ok {
d, tok, err := c.listWithKSToken(ctx, filter, pageSize, ctks)
if err != nil {
return nil, ct.End(), err
}
return d, tok, nil
} else if ctks, ok := inTok.(ct.CTPaginated); ok {
d, tok, err := c.listWithPaginatedToken(ctx, filter, pageSize, ctks)
if err != nil {
return nil, ct.End(), err
}
return d, tok, nil
} else {
return nil, ct.End(), exerr.New(exerr.TypeCursorTokenDecode, "unknown ct type").Any("token", inTok).Type("tokenType", inTok).Build()
}
}
func (c *Coll[TData]) listWithKSToken(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTKeySort) ([]TData, ct.CursorToken, error) {
if inTok.Mode == ct.CTMEnd {
return make([]TData, 0), ct.End(), nil
}
@@ -41,7 +63,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
paginationPipeline, doubleSortPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
if err != nil {
- return nil, ct.CursorToken{}, exerr.
+ return nil, nil, exerr.
Wrap(err, "failed to create pagination").
WithType(exerr.TypeCursorTokenDecode).
Str("collection", c.Name()).
@@ -66,7 +88,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
cursor, err := c.coll.Aggregate(ctx, pipeline)
if err != nil {
- return nil, ct.CursorToken{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
+ return nil, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
defer func() { _ = cursor.Close(ctx) }()
@@ -75,7 +97,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
if pageSize == nil {
entries, err := c.decodeAll(ctx, cursor)
if err != nil {
- return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to all-decode entities").Build()
+ return nil, nil, exerr.Wrap(err, "failed to all-decode entities").Build()
}
return entries, ct.End(), nil
}
@@ -85,7 +107,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
var entry TData
entry, err = c.decodeSingle(ctx, cursor)
if err != nil {
- return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to decode entity").Build()
+ return nil, nil, exerr.Wrap(err, "failed to decode entity").Build()
}
entities = append(entities, entry)
}
@@ -100,12 +122,70 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize)
if err != nil {
- return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to create (out)-token").Build()
+ return nil, nil, exerr.Wrap(err, "failed to create (out)-token").Build()
}
return entities, nextToken, nil
}
func (c *Coll[TData]) listWithPaginatedToken(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTPaginated) ([]TData, ct.CursorToken, error) {
var err error
page := inTok.Page
if page < 0 {
page = 1
}
pipelineSort := mongo.Pipeline{}
pipelineFilter := mongo.Pipeline{}
if filter != nil {
pipelineFilter = filter.FilterQuery(ctx)
pf1, pd1, pf2, pd2 := filter.Pagination(ctx)
pipelineSort, err = createSortOnlyPipeline(pf1, pd1, &pf2, &pd2)
if err != nil {
return nil, nil, exerr.Wrap(err, "failed to create sort pipeline").Build()
}
}
pipelinePaginate := mongo.Pipeline{}
if pageSize != nil {
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$skip", Value: *pageSize * (page - 1)}})
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$limit", Value: *pageSize}})
} else {
page = 1
}
pipelineCount := mongo.Pipeline{}
pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}})
extrModPipelineResolved := mongo.Pipeline{}
for _, ppl := range c.extraModPipeline {
extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx))
}
pipelineList := langext.ArrConcat(pipelineFilter, pipelineSort, pipelinePaginate, extrModPipelineResolved, pipelineSort)
cursorList, err := c.coll.Aggregate(ctx, pipelineList)
if err != nil {
return nil, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build()
}
entities, err := c.decodeAll(ctx, cursorList)
if err != nil {
return nil, nil, exerr.Wrap(err, "failed to all-decode entities").Build()
}
tokOut := ct.Page(page + 1)
if pageSize == nil || len(entities) < *pageSize {
tokOut = ct.PageEnd()
}
return entities, tokOut, nil
}
func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, error) {
type countRes struct {
Count int64 `bson:"c"`
@@ -138,12 +218,12 @@ func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageS
// NOTE: Possible optimization: Cache count in CursorToken, then fetch count only on first page.
count, err := c.Count(ctx, filter)
if err != nil {
- return nil, ct.CursorToken{}, 0, err
+ return nil, nil, 0, err
}
data, token, err := c.List(ctx, filter, pageSize, inTok)
if err != nil {
- return nil, ct.CursorToken{}, 0, err
+ return nil, nil, 0, err
}
return data, token, count, nil
}
@@ -184,7 +264,7 @@ func (c *Coll[TData]) ListAllIDs(ctx context.Context, filter ct.RawFilter) ([]st
return langext.ArrMap(res, func(v idObject) string { return v.ID }), nil
}
- func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) {
+ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CTKeySort, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) {
cond := bson.A{}
sort := bson.D{}
@@ -265,3 +345,33 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken
return pipeline, pipelineSort, nil
}
func createSortOnlyPipeline(fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection) ([]bson.D, error) {
sort := bson.D{}
if sortPrimary == ct.SortASC {
// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if sortPrimary == ct.SortDESC {
// We sort DESC on <field> - so we want all entries older ($lt) than the $primary
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {
if *sortSecondary == ct.SortASC {
sort = append(sort, bson.E{Key: *fieldSecondary, Value: +1})
} else if *sortSecondary == ct.SortDESC {
sort = append(sort, bson.E{Key: *fieldSecondary, Value: -1})
}
}
pipelineSort := mongo.Pipeline{bson.D{{Key: "$sort", Value: sort}}}
return pipelineSort, nil
}
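Finally, a worked check of the $skip/$limit arithmetic used by listWithPaginatedToken above: pages are 1-based, so page p with page size n skips n*(p-1) documents (page 3 with size 20 skips 40 and limits to 20). A self-contained sketch of the same two stages, assuming the mongo driver from go.mod:

package main

import (
    "fmt"

    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/mongo"
)

// paginateStages mirrors the $skip/$limit construction from listWithPaginatedToken.
func paginateStages(pageSize int, page int) mongo.Pipeline {
    return mongo.Pipeline{
        bson.D{{Key: "$skip", Value: pageSize * (page - 1)}},
        bson.D{{Key: "$limit", Value: pageSize}},
    }
}

func main() {
    fmt.Println(paginateStages(20, 3)) // [[{$skip 40}] [{$limit 20}]]
}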