Compare commits

...

21 Commits

SHA1       | Message | Date | Checks (Build Docker and Deploy / Run goext test-suite)
36b092774d | v0.0.550 ArrMapSum | 2024-12-26 00:23:24 +01:00 | successful (2m12s)
a8c6e39ac5 | v0.0.549 | 2024-12-10 13:24:06 +01:00 | successful (2m4s)
62f2ce9268 | v0.0.548 | 2024-12-09 17:39:35 +01:00 | successful (2m56s)
49375e90f0 | v0.0.547 allow calling ListWithCount with pageSize=0 | 2024-12-08 18:04:04 +01:00 | successful (2m55s)
d8cf255c80 | v0.0.546 Fix ginext json-parse error when the bufferedReader was read beforehand | 2024-11-28 12:06:57 +01:00 | successful (2m55s)
b520282ba0 | v0.0.545 | 2024-11-27 13:21:45 +01:00 | successful (2m6s)
27cc9366b5 | v0.0.544 | 2024-11-26 15:10:27 +01:00 | successful (3m0s)
d9517fe73c | v0.0.543 | 2024-11-13 15:03:51 +01:00 | successful (2m7s)
8a92a6cc52 | v0.0.542 | 2024-11-07 13:13:12 +01:00 | successful (2m9s)
9b2028ab54 | v0.0.541 | 2024-11-05 14:38:42 +01:00 | successful (3m3s)
207fd331d5 | v0.0.540 handle ct=nil same as ct=Start | 2024-10-28 14:35:05 +01:00 | successful (2m25s)
54b0d6701d | v0.0.539 | 2024-10-27 02:21:35 +02:00 | successful (2m49s)
fc2657179b | v0.0.538 | 2024-10-27 02:18:45 +02:00 | successful (2m7s)
d4894e31fe | Merge branch 'cursortoken-paginated' | 2024-10-27 02:18:25 +02:00 | cancelled
0ddfaf666b | v0.0.537 fix goext error print always showing error-type of highest-level error | 2024-10-26 23:38:13 +02:00 | successful (2m12s)
e154137105 | Trying out paginated cursortoken variant [UNTESTED] | 2024-10-25 09:45:42 +02:00 | successful (2m11s)
9b9a79b4ad | v0.0.536 revert bfcodegen changes | 2024-10-22 09:57:06 +02:00 | successful (2m24s)
5a8d7110e4 | v0.0.535 | 2024-10-22 09:41:59 +02:00 | successful (2m52s)
d47c84cd47 | v0.0.534 | 2024-10-22 09:40:53 +02:00 | cancelled
c571f3f888 | v0.0.533 Made String() functions in bfcodegen nil-ptr safe | 2024-10-22 09:36:40 +02:00 | successful (2m9s)
e884ba6b89 | v0.0.532 | 2024-10-13 16:33:39 +02:00 | successful (2m53s)
21 changed files with 1264 additions and 189 deletions

View File

@@ -6,3 +6,13 @@ const (
SortASC SortDirection = "ASC"
SortDESC SortDirection = "DESC"
)
func (sd SortDirection) ToMongo() int {
if sd == SortASC {
return 1
} else if sd == SortDESC {
return -1
} else {
return 0
}
}
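
The new ToMongo() helper maps a SortDirection onto the integer values that MongoDB's $sort stage expects. A minimal usage sketch (the cursortoken import alias follows the ct alias used elsewhere in this diff; the field name is illustrative):

package main

import (
	"fmt"

	"go.mongodb.org/mongo-driver/bson"

	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)

func main() {
	// build a $sort stage from a SortDirection instead of hard-coding +1 / -1
	dir := ct.SortDESC
	sortStage := bson.D{{Key: "$sort", Value: bson.D{{Key: "createdAt", Value: dir.ToMongo()}}}}
	fmt.Println(sortStage) // -1 for SortDESC, +1 for SortASC, 0 for an empty direction
}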

View File

@@ -3,12 +3,18 @@ package cursortoken
import (
"encoding/base32"
"encoding/json"
"go.mongodb.org/mongo-driver/bson/primitive"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"strconv"
"strings"
"time"
)
type CursorToken interface {
Token() string
IsStart() bool
IsEnd() bool
}
type Mode string
const (
@@ -24,97 +30,6 @@ type Extra struct {
PageSize *int
}
type CursorToken struct {
Mode Mode
ValuePrimary string
ValueSecondary string
Direction SortDirection
DirectionSecondary SortDirection
PageSize int
Extra Extra
}
type cursorTokenSerialize struct {
ValuePrimary *string `json:"v1,omitempty"`
ValueSecondary *string `json:"v2,omitempty"`
Direction *SortDirection `json:"dir,omitempty"`
DirectionSecondary *SortDirection `json:"dir2,omitempty"`
PageSize *int `json:"size,omitempty"`
ExtraTimestamp *time.Time `json:"ts,omitempty"`
ExtraId *string `json:"id,omitempty"`
ExtraPage *int `json:"pg,omitempty"`
ExtraPageSize *int `json:"sz,omitempty"`
}
func Start() CursorToken {
return CursorToken{
Mode: CTMStart,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func End() CursorToken {
return CursorToken{
Mode: CTMEnd,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func (c *CursorToken) Token() string {
if c.Mode == CTMStart {
return "@start"
}
if c.Mode == CTMEnd {
return "@end"
}
// We kinda manually implement omitempty for the CursorToken here
// because omitempty does not work for time.Time and otherwise we would always
// get weird time values when decoding a token that initially didn't have a Timestamp set
// For this usecase we treat Unix=0 as an empty timestamp
sertok := cursorTokenSerialize{}
if c.ValuePrimary != "" {
sertok.ValuePrimary = &c.ValuePrimary
}
if c.ValueSecondary != "" {
sertok.ValueSecondary = &c.ValueSecondary
}
if c.Direction != "" {
sertok.Direction = &c.Direction
}
if c.DirectionSecondary != "" {
sertok.DirectionSecondary = &c.DirectionSecondary
}
if c.PageSize != 0 {
sertok.PageSize = &c.PageSize
}
sertok.ExtraTimestamp = c.Extra.Timestamp
sertok.ExtraId = c.Extra.Id
sertok.ExtraPage = c.Extra.Page
sertok.ExtraPageSize = c.Extra.PageSize
body, err := json.Marshal(sertok)
if err != nil {
panic(err)
}
return "tok_" + base32.StdEncoding.EncodeToString(body)
}
func Decode(tok string) (CursorToken, error) {
if tok == "" {
return Start(), nil
@@ -125,60 +40,56 @@ func Decode(tok string) (CursorToken, error) {
if strings.ToLower(tok) == "@end" {
return End(), nil
}
if !strings.HasPrefix(tok, "tok_") {
return CursorToken{}, exerr.New(exerr.TypeCursorTokenDecode, "could not decode token, missing prefix").Str("token", tok).Build()
if strings.ToLower(tok) == "$end" {
return PageEnd(), nil
}
if strings.HasPrefix(tok, "$") && len(tok) > 1 {
n, err := strconv.ParseInt(tok[1:], 10, 64)
if err != nil {
return nil, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).WithType(exerr.TypeCursorTokenDecode).Build()
}
return Page(int(n)), nil
}
body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):])
if err != nil {
return CursorToken{}, err
}
if strings.HasPrefix(tok, "tok_") {
var tokenDeserialize cursorTokenSerialize
err = json.Unmarshal(body, &tokenDeserialize)
if err != nil {
return CursorToken{}, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).Build()
}
body, err := base32.StdEncoding.DecodeString(tok[len("tok_"):])
if err != nil {
return nil, err
}
token := CursorToken{Mode: CTMNormal}
var tokenDeserialize cursorTokenKeySortSerialize
err = json.Unmarshal(body, &tokenDeserialize)
if err != nil {
return nil, exerr.Wrap(err, "failed to deserialize token").Str("token", tok).WithType(exerr.TypeCursorTokenDecode).Build()
}
if tokenDeserialize.ValuePrimary != nil {
token.ValuePrimary = *tokenDeserialize.ValuePrimary
}
if tokenDeserialize.ValueSecondary != nil {
token.ValueSecondary = *tokenDeserialize.ValueSecondary
}
if tokenDeserialize.Direction != nil {
token.Direction = *tokenDeserialize.Direction
}
if tokenDeserialize.DirectionSecondary != nil {
token.DirectionSecondary = *tokenDeserialize.DirectionSecondary
}
if tokenDeserialize.PageSize != nil {
token.PageSize = *tokenDeserialize.PageSize
}
token := CTKeySort{Mode: CTMNormal}
token.Extra.Timestamp = tokenDeserialize.ExtraTimestamp
token.Extra.Id = tokenDeserialize.ExtraId
token.Extra.Page = tokenDeserialize.ExtraPage
token.Extra.PageSize = tokenDeserialize.ExtraPageSize
if tokenDeserialize.ValuePrimary != nil {
token.ValuePrimary = *tokenDeserialize.ValuePrimary
}
if tokenDeserialize.ValueSecondary != nil {
token.ValueSecondary = *tokenDeserialize.ValueSecondary
}
if tokenDeserialize.Direction != nil {
token.Direction = *tokenDeserialize.Direction
}
if tokenDeserialize.DirectionSecondary != nil {
token.DirectionSecondary = *tokenDeserialize.DirectionSecondary
}
if tokenDeserialize.PageSize != nil {
token.PageSize = *tokenDeserialize.PageSize
}
return token, nil
}
token.Extra.Timestamp = tokenDeserialize.ExtraTimestamp
token.Extra.Id = tokenDeserialize.ExtraId
token.Extra.Page = tokenDeserialize.ExtraPage
token.Extra.PageSize = tokenDeserialize.ExtraPageSize
return token, nil
func (c *CursorToken) ValuePrimaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
}
}
func (c *CursorToken) ValueSecondaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
return nil, exerr.New(exerr.TypeCursorTokenDecode, "could not decode token, missing/unknown prefix").Str("token", tok).Build()
}
}
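
With the interface-based tokens, Decode() now dispatches on the token prefix: an empty string yields Start(), "@end" yields End(), "$end" and "$<n>" yield the new paginated tokens, and "tok_..." is parsed into a CTKeySort. A small sketch of that dispatch (import alias assumed as above):

package main

import (
	"fmt"

	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)

func main() {
	for _, raw := range []string{"", "@end", "$3", "$end"} {
		tok, err := ct.Decode(raw)
		if err != nil {
			fmt.Printf("%q => error: %v\n", raw, err)
			continue
		}
		// Token() re-serializes the decoded value; IsStart/IsEnd report its mode
		fmt.Printf("%q => %q (start=%v, end=%v)\n", raw, tok.Token(), tok.IsStart(), tok.IsEnd())
	}
}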

cursortoken/tokenKeySort.go (new file, 136 lines)
View File

@@ -0,0 +1,136 @@
package cursortoken
import (
"encoding/base32"
"encoding/json"
"go.mongodb.org/mongo-driver/bson/primitive"
"time"
)
type CTKeySort struct {
Mode Mode
ValuePrimary string
ValueSecondary string
Direction SortDirection
DirectionSecondary SortDirection
PageSize int
Extra Extra
}
type cursorTokenKeySortSerialize struct {
ValuePrimary *string `json:"v1,omitempty"`
ValueSecondary *string `json:"v2,omitempty"`
Direction *SortDirection `json:"dir,omitempty"`
DirectionSecondary *SortDirection `json:"dir2,omitempty"`
PageSize *int `json:"size,omitempty"`
ExtraTimestamp *time.Time `json:"ts,omitempty"`
ExtraId *string `json:"id,omitempty"`
ExtraPage *int `json:"pg,omitempty"`
ExtraPageSize *int `json:"sz,omitempty"`
}
func NewKeySortToken(valuePrimary string, valueSecondary string, direction SortDirection, directionSecondary SortDirection, pageSize int, extra Extra) CursorToken {
return CTKeySort{
Mode: CTMNormal,
ValuePrimary: valuePrimary,
ValueSecondary: valueSecondary,
Direction: direction,
DirectionSecondary: directionSecondary,
PageSize: pageSize,
Extra: extra,
}
}
func Start() CursorToken {
return CTKeySort{
Mode: CTMStart,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func End() CursorToken {
return CTKeySort{
Mode: CTMEnd,
ValuePrimary: "",
ValueSecondary: "",
Direction: "",
DirectionSecondary: "",
PageSize: 0,
Extra: Extra{},
}
}
func (c CTKeySort) Token() string {
if c.Mode == CTMStart {
return "@start"
}
if c.Mode == CTMEnd {
return "@end"
}
// We kinda manually implement omitempty for the CursorToken here
// because omitempty does not work for time.Time and otherwise we would always
// get weird time values when decoding a token that initially didn't have a Timestamp set
// For this usecase we treat Unix=0 as an empty timestamp
sertok := cursorTokenKeySortSerialize{}
if c.ValuePrimary != "" {
sertok.ValuePrimary = &c.ValuePrimary
}
if c.ValueSecondary != "" {
sertok.ValueSecondary = &c.ValueSecondary
}
if c.Direction != "" {
sertok.Direction = &c.Direction
}
if c.DirectionSecondary != "" {
sertok.DirectionSecondary = &c.DirectionSecondary
}
if c.PageSize != 0 {
sertok.PageSize = &c.PageSize
}
sertok.ExtraTimestamp = c.Extra.Timestamp
sertok.ExtraId = c.Extra.Id
sertok.ExtraPage = c.Extra.Page
sertok.ExtraPageSize = c.Extra.PageSize
body, err := json.Marshal(sertok)
if err != nil {
panic(err)
}
return "tok_" + base32.StdEncoding.EncodeToString(body)
}
func (c CTKeySort) IsEnd() bool {
return c.Mode == CTMEnd
}
func (c CTKeySort) IsStart() bool {
return c.Mode == CTMStart
}
func (c CTKeySort) valuePrimaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValuePrimary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
}
}
func (c CTKeySort) valueSecondaryObjectId() (primitive.ObjectID, bool) {
if oid, err := primitive.ObjectIDFromHex(c.ValueSecondary); err == nil {
return oid, true
} else {
return primitive.ObjectID{}, false
}
}
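
CTKeySort keeps the behaviour of the previous struct-based CursorToken: NewKeySortToken records the sort values of the last returned entity, Token() serializes them as "tok_" + base32(JSON), and Decode() restores them on the next request. A round-trip sketch with illustrative values:

package main

import (
	"fmt"

	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)

func main() {
	// encode the position after the last returned entity
	tok := ct.NewKeySortToken("0123456789abcdef01234567", "", ct.SortASC, ct.SortASC, 50, ct.Extra{})
	enc := tok.Token() // "tok_" + base32-encoded JSON

	// decode it again when the client sends it back
	back, err := ct.Decode(enc)
	if err != nil {
		panic(err)
	}
	fmt.Println(enc, back.IsStart(), back.IsEnd()) // tok_... false false
}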

View File

@@ -0,0 +1,41 @@
package cursortoken
import "strconv"
type CTPaginated struct {
Mode Mode
Page int
}
func Page(p int) CursorToken {
return CTPaginated{
Mode: CTMNormal,
Page: p,
}
}
func PageEnd() CursorToken {
return CTPaginated{
Mode: CTMEnd,
Page: 0,
}
}
func (c CTPaginated) Token() string {
if c.Mode == CTMStart {
return "$1"
}
if c.Mode == CTMEnd {
return "$end"
}
return "$" + strconv.Itoa(c.Page)
}
func (c CTPaginated) IsEnd() bool {
return c.Mode == CTMEnd
}
func (c CTPaginated) IsStart() bool {
return c.Mode == CTMStart || c.Page == 1
}
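
CTPaginated is the new page-number flavour: tokens are simply "$1", "$2", ... plus "$end", and page 1 doubles as the start token. A short sketch:

package main

import (
	"fmt"

	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)

func main() {
	p1 := ct.Page(1)
	p2 := ct.Page(2)
	end := ct.PageEnd()

	fmt.Println(p1.Token(), p1.IsStart()) // $1 true (page 1 counts as the start token)
	fmt.Println(p2.Token(), p2.IsStart()) // $2 false
	fmt.Println(end.Token(), end.IsEnd()) // $end true
}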

View File

@@ -115,6 +115,9 @@ func (b *bufferedReadCloser) BufferedAll() ([]byte, error) {
return nil, err
}
}
if err := b.Reset(); err != nil {
return nil, err
}
return b.buffer, nil
case modeSourceFinished:
@@ -131,10 +134,22 @@ func (b *bufferedReadCloser) BufferedAll() ([]byte, error) {
}
}
// Reset resets the read position to the beginning of the buffer.
// If the original source is partially read, we will finish reading it and fill our buffer
func (b *bufferedReadCloser) Reset() error {
switch b.mode {
case modeSourceReading:
fallthrough
if b.off == 0 {
return nil // nobody has read anything yet
}
err := b.Close()
if err != nil {
return err
}
b.mode = modeBufferReading
b.off = 0
return nil
case modeSourceFinished:
err := b.Close()
if err != nil {

View File

@@ -3,6 +3,7 @@ package dataext
import (
"encoding/json"
"errors"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
type JsonOpt[T any] struct {
@@ -10,6 +11,14 @@ type JsonOpt[T any] struct {
value T
}
func NewJsonOpt[T any](v T) JsonOpt[T] {
return JsonOpt[T]{isSet: true, value: v}
}
func EmptyJsonOpt[T any]() JsonOpt[T] {
return JsonOpt[T]{isSet: false}
}
// MarshalJSON returns m as the JSON encoding of m.
func (m JsonOpt[T]) MarshalJSON() ([]byte, error) {
if !m.isSet {
@@ -51,6 +60,13 @@ func (m JsonOpt[T]) ValueOrNil() *T {
return &m.value
}
func (m JsonOpt[T]) ValueDblPtrOrNil() **T {
if !m.isSet {
return nil
}
return langext.DblPtr(m.value)
}
func (m JsonOpt[T]) MustValue() T {
if !m.isSet {
panic("value not set")

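The new NewJsonOpt/EmptyJsonOpt constructors and the ValueDblPtrOrNil accessor make it easier to construct JsonOpt values outside of JSON unmarshalling. A minimal sketch of the accessors touched in this diff:

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/dataext"
)

func main() {
	set := dataext.NewJsonOpt(42)        // value explicitly set
	empty := dataext.EmptyJsonOpt[int]() // value absent

	fmt.Println(set.MustValue())    // 42
	fmt.Println(empty.ValueOrNil()) // <nil>

	// ValueDblPtrOrNil returns nil when the value is unset, and a **T otherwise
	if pp := set.ValueDblPtrOrNil(); pp != nil && *pp != nil {
		fmt.Println(**pp) // 42
	}
}
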
View File

@@ -26,6 +26,20 @@ func (rb *RingBuffer[T]) Push(item T) {
rb.head = (rb.head + 1) % rb.capacity
}
func (rb *RingBuffer[T]) PushPop(item T) *T {
if rb.size < rb.capacity {
rb.size++
rb.items[rb.head] = item
rb.head = (rb.head + 1) % rb.capacity
return nil
} else {
prev := rb.items[rb.head]
rb.items[rb.head] = item
rb.head = (rb.head + 1) % rb.capacity
return &prev
}
}
func (rb *RingBuffer[T]) Peek() (T, bool) {
if rb.size == 0 {
return *new(T), false
@@ -96,3 +110,35 @@ func (rb *RingBuffer[T]) Iter2() iter.Seq2[int, T] {
}
}
}
func (rb *RingBuffer[T]) Remove(fnEqual func(v T) bool) int {
// Mike [2024-11-13]: I *really* tried to write an in-place algorithm to remove elements
// But after careful consideration, I left that as an exercise for future readers
// It is, surprisingly, non-trivial, especially because the head-ptr must be weirdly updated
// And our At() method does not work correctly with {head<>0 && size<capacity}
dc := 0
b := make([]T, rb.capacity)
bsize := 0
for i := 0; i < rb.size; i++ {
comp := rb.At(i)
if fnEqual(comp) {
dc++
} else {
b[bsize] = comp
bsize++
}
}
if dc == 0 {
return 0
}
rb.items = b
rb.size = bsize
rb.head = bsize % rb.capacity
return dc
}
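
PushPop and Remove extend the existing RingBuffer: PushPop reports which element was evicted (nil while the buffer still has room), and Remove drops every element matching a predicate and returns how many were dropped. A usage sketch:

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/dataext"
)

func main() {
	rb := dataext.NewRingBuffer[int](3)

	// PushPop returns the evicted element once the buffer is full, otherwise nil
	for _, v := range []int{1, 2, 3, 4} {
		if old := rb.PushPop(v); old != nil {
			fmt.Println("evicted:", *old) // evicted: 1
		}
	}

	// Remove drops all elements matching the predicate and reports the count
	n := rb.Remove(func(v int) bool { return v%2 == 0 })
	fmt.Println(n, rb.Items()) // 2 [3]
}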

dataext/ringBuffer_test.go (new file, 447 lines)
View File

@@ -0,0 +1,447 @@
package dataext
import "testing"
func TestRingBufferPushAddsItem(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
if rb.Size() != 1 {
t.Errorf("Expected size 1, got %d", rb.Size())
}
if item, _ := rb.Peek(); item != 1 {
t.Errorf("Expected item 1, got %d", item)
}
}
func TestRingBufferPushPopReturnsOldestItem(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
if item := rb.PushPop(4); item == nil || *item != 1 {
t.Errorf("Expected item 1, got %v", item)
}
}
func TestRingBufferPeekReturnsLastPushedItem(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
if item, _ := rb.Peek(); item != 2 {
t.Errorf("Expected item 2, got %d", item)
}
}
func TestRingBufferOverflow1(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1) // overridden
rb.Push(2) // overridden
rb.Push(3)
rb.Push(9)
rb.Push(4)
rb.Push(5)
rb.Push(7)
if rb.Size() != 5 {
t.Errorf("Expected size 4, got %d", rb.Size())
}
expected := []int{3, 9, 4, 5, 7}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferItemsReturnsAllItems(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
items := rb.Items()
expected := []int{1, 2, 3}
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferClearEmptiesBuffer(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Clear()
if rb.Size() != 0 {
t.Errorf("Expected size 0, got %d", rb.Size())
}
}
func TestRingBufferIsFullReturnsTrueWhenFull(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
if !rb.IsFull() {
t.Errorf("Expected buffer to be full")
}
}
func TestRingBufferAtReturnsCorrectItem(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
if item := rb.At(1); item != 2 {
t.Errorf("Expected item 2, got %d", item)
}
}
func TestRingBufferGetReturnsCorrectItem(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
if item, ok := rb.Get(1); !ok || item != 2 {
t.Errorf("Expected item 2, got %d", item)
}
}
func TestRingBufferRemoveDeletesMatchingItems(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1)
rb.Push(2)
rb.Push(3)
rb.Push(2)
rb.Push(4)
removed := rb.Remove(func(v int) bool { return v == 2 })
if removed != 2 {
t.Errorf("Expected 2 items removed, got %d", removed)
}
if rb.Size() != 3 {
t.Errorf("Expected size 3, got %d", rb.Size())
}
expected := []int{1, 3, 4}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveDeletesMatchingItems2(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1)
rb.Push(2)
rb.Push(3)
rb.Push(2)
rb.Push(4)
removed := rb.Remove(func(v int) bool { return v == 3 })
if removed != 1 {
t.Errorf("Expected 1 item removed, got %d", removed)
}
if rb.Size() != 4 {
t.Errorf("Expected size 4, got %d", rb.Size())
}
expected := []int{1, 2, 2, 4}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveDeletesMatchingItems3(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1)
rb.Push(2)
rb.Push(3)
rb.Push(9)
rb.Push(4)
removed := rb.Remove(func(v int) bool { return v == 3 })
if removed != 1 {
t.Errorf("Expected 1 item removed, got %d", removed)
}
if rb.Size() != 4 {
t.Errorf("Expected size 4, got %d", rb.Size())
}
expected := []int{1, 2, 9, 4}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveDeletesMatchingItems4(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1) // overridden
rb.Push(2) // overridden
rb.Push(3)
rb.Push(9)
rb.Push(4)
rb.Push(5)
rb.Push(7)
removed := rb.Remove(func(v int) bool { return v == 7 })
if removed != 1 {
t.Errorf("Expected 1 items removed, got %d", removed)
}
if rb.Size() != 4 {
t.Errorf("Expected size 4, got %d", rb.Size())
}
expected := []int{3, 9, 4, 5}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveDeletesMatchingItems5(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1) // overridden
rb.Push(2) // overridden
rb.Push(3)
rb.Push(9)
rb.Push(4)
rb.Push(5)
rb.Push(7)
removed := rb.Remove(func(v int) bool { return v == 3 })
if removed != 1 {
t.Errorf("Expected 1 items removed, got %d", removed)
}
if rb.Size() != 4 {
t.Errorf("Expected size 4, got %d", rb.Size())
}
expected := []int{9, 4, 5, 7}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveDeletesMatchingItems6(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1) // overridden
rb.Push(2) // overridden
rb.Push(3)
rb.Push(9)
rb.Push(4)
rb.Push(5)
rb.Push(7)
removed := rb.Remove(func(v int) bool { return v == 1 })
if removed != 0 {
t.Errorf("Expected 0 items removed, got %d", removed)
}
if rb.Size() != 5 {
t.Errorf("Expected size 5, got %d", rb.Size())
}
expected := []int{3, 9, 4, 5, 7}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
if !rb.IsFull() {
t.Errorf("Expected buffer to not be full")
}
}
func TestRingBufferRemoveDeletesMatchingItems7(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1) // overridden
rb.Push(2) // overridden
rb.Push(3)
rb.Push(9)
rb.Push(4)
rb.Push(5)
rb.Push(7)
removed := rb.Remove(func(v int) bool { return v == 9 })
if removed != 1 {
t.Errorf("Expected 1 items removed, got %d", removed)
}
if rb.Size() != 4 {
t.Errorf("Expected size 4, got %d", rb.Size())
}
expected := []int{3, 4, 5, 7}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
if rb.IsFull() {
t.Errorf("Expected buffer to not be full")
}
}
func TestRingBufferAddItemsToFullRingBuffer(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
rb.Push(4)
if rb.Size() != 3 {
t.Errorf("Expected size 3, got %d", rb.Size())
}
expected := []int{2, 3, 4}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferAddItemsToNonFullRingBuffer(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
if rb.Size() != 2 {
t.Errorf("Expected size 2, got %d", rb.Size())
}
expected := []int{1, 2}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveItemsFromNonFullRingBuffer(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
removed := rb.Remove(func(v int) bool { return v == 1 })
if removed != 1 {
t.Errorf("Expected 1 item removed, got %d", removed)
}
if rb.Size() != 1 {
t.Errorf("Expected size 1, got %d", rb.Size())
}
expected := []int{2}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveItemsFromFullRingBuffer(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
removed := rb.Remove(func(v int) bool { return v == 2 })
if removed != 1 {
t.Errorf("Expected 1 item removed, got %d", removed)
}
if rb.Size() != 2 {
t.Errorf("Expected size 2, got %d", rb.Size())
}
expected := []int{1, 3}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveMultipleItemsFromRingBuffer(t *testing.T) {
rb := NewRingBuffer[int](5)
rb.Push(1)
rb.Push(2)
rb.Push(3)
rb.Push(2)
rb.Push(4)
removed := rb.Remove(func(v int) bool { return v == 2 })
if removed != 2 {
t.Errorf("Expected 2 items removed, got %d", removed)
}
if rb.Size() != 3 {
t.Errorf("Expected size 3, got %d", rb.Size())
}
expected := []int{1, 3, 4}
items := rb.Items()
for i, item := range items {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
}
}
func TestRingBufferRemoveAllItemsFromRingBuffer(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
removed := rb.Remove(func(v int) bool { return true })
if removed != 3 {
t.Errorf("Expected 3 items removed, got %d", removed)
}
if rb.Size() != 0 {
t.Errorf("Expected size 0, got %d", rb.Size())
}
}
func TestRingBufferRemoveNoItemsFromRingBuffer(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
removed := rb.Remove(func(v int) bool { return false })
if removed != 0 {
t.Errorf("Expected 0 items removed, got %d", removed)
}
if rb.Size() != 3 {
t.Errorf("Expected size 3, got %d", rb.Size())
}
}
func TestRingBufferIteratesOverAllItems(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
expected := []int{1, 2, 3}
i := 0
for item := range rb.Iter() {
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
i++
}
if i != len(expected) {
t.Errorf("Expected to iterate over %d items, but iterated over %d", len(expected), i)
}
}
func TestRingBufferIter2IteratesOverAllItemsWithIndices(t *testing.T) {
rb := NewRingBuffer[int](3)
rb.Push(1)
rb.Push(2)
rb.Push(3)
expected := []int{1, 2, 3}
i := 0
for index, item := range rb.Iter2() {
if index != i {
t.Errorf("Expected index %d, got %d", i, index)
}
if item != expected[i] {
t.Errorf("Expected item %d, got %d", expected[i], item)
}
i++
}
if i != len(expected) {
t.Errorf("Expected to iterate over %d items, but iterated over %d", len(expected), i)
}
}

View File

@@ -7,6 +7,10 @@ type SyncMap[TKey comparable, TData any] struct {
lock sync.Mutex
}
func NewSyncMap[TKey comparable, TData any]() *SyncMap[TKey, TData] {
return &SyncMap[TKey, TData]{data: make(map[TKey]TData), lock: sync.Mutex{}}
}
func (s *SyncMap[TKey, TData]) Set(key TKey, data TData) {
s.lock.Lock()
defer s.lock.Unlock()

dataext/syncRingSet.go (new file, 143 lines)
View File

@@ -0,0 +1,143 @@
package dataext
import "sync"
type SyncRingSet[TData comparable] struct {
data map[TData]bool
lock sync.Mutex
ring *RingBuffer[TData]
}
func NewSyncRingSet[TData comparable](capacity int) *SyncRingSet[TData] {
return &SyncRingSet[TData]{
data: make(map[TData]bool, capacity+1),
lock: sync.Mutex{},
ring: NewRingBuffer[TData](capacity),
}
}
// Add adds `value` to the set
// returns true if the value was actually inserted (value did not exist beforehand)
// returns false if the value already existed
func (s *SyncRingSet[TData]) Add(value TData) bool {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
_, existsInPreState := s.data[value]
if existsInPreState {
return false
}
prev := s.ring.PushPop(value)
s.data[value] = true
if prev != nil {
delete(s.data, *prev)
}
return true
}
func (s *SyncRingSet[TData]) AddAll(values []TData) {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
for _, value := range values {
_, existsInPreState := s.data[value]
if existsInPreState {
continue
}
prev := s.ring.PushPop(value)
s.data[value] = true
if prev != nil {
delete(s.data, *prev)
}
}
}
func (s *SyncRingSet[TData]) Remove(value TData) bool {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
_, existsInPreState := s.data[value]
if !existsInPreState {
return false
}
delete(s.data, value)
s.ring.Remove(func(v TData) bool { return value == v })
return true
}
func (s *SyncRingSet[TData]) RemoveAll(values []TData) {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
for _, value := range values {
delete(s.data, value)
s.ring.Remove(func(v TData) bool { return value == v })
}
}
func (s *SyncRingSet[TData]) Contains(value TData) bool {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
_, ok := s.data[value]
return ok
}
func (s *SyncRingSet[TData]) Get() []TData {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
r := make([]TData, 0, len(s.data))
for k := range s.data {
r = append(r, k)
}
return r
}
// AddIfNotContains
// returns true if the value was actually added (value did not exist beforehand)
// returns false if the value already existed
func (s *SyncRingSet[TData]) AddIfNotContains(key TData) bool {
return s.Add(key)
}
// RemoveIfContains
// returns true if the value was actually removed (value did exist beforehand)
// returns false if the value did not exist in the set
func (s *SyncRingSet[TData]) RemoveIfContains(key TData) bool {
return s.Remove(key)
}
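
SyncRingSet combines the map-based SyncSet with the RingBuffer above into a bounded, thread-safe "recently seen" set: once the capacity is exceeded, the oldest entry is evicted from both the ring and the map, so it can be added again later. A small sketch:

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/dataext"
)

func main() {
	// remembers at most the 2 most recently added values
	seen := dataext.NewSyncRingSet[string](2)

	fmt.Println(seen.Add("a")) // true  (new value)
	fmt.Println(seen.Add("a")) // false (duplicate)
	fmt.Println(seen.Add("b")) // true
	fmt.Println(seen.Add("c")) // true, evicts "a" from ring and map

	fmt.Println(seen.Contains("a")) // false, "a" could be re-added now
}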

View File

@@ -7,8 +7,12 @@ type SyncSet[TData comparable] struct {
lock sync.Mutex
}
func NewSyncSet[TData comparable]() *SyncSet[TData] {
return &SyncSet[TData]{data: make(map[TData]bool), lock: sync.Mutex{}}
}
// Add adds `value` to the set
// returns true if the value was actually inserted
// returns true if the value was actually inserted (value did not exist beforehand)
// returns false if the value already existed
func (s *SyncSet[TData]) Add(value TData) bool {
s.lock.Lock()
@@ -19,9 +23,12 @@ func (s *SyncSet[TData]) Add(value TData) bool {
}
_, existsInPreState := s.data[value]
s.data[value] = true
if existsInPreState {
return false
}
return !existsInPreState
s.data[value] = true
return true
}
func (s *SyncSet[TData]) AddAll(values []TData) {
@@ -37,6 +44,36 @@ func (s *SyncSet[TData]) AddAll(values []TData) {
}
}
func (s *SyncSet[TData]) Remove(value TData) bool {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
_, existsInPreState := s.data[value]
if !existsInPreState {
return false
}
delete(s.data, value)
return true
}
func (s *SyncSet[TData]) RemoveAll(values []TData) {
s.lock.Lock()
defer s.lock.Unlock()
if s.data == nil {
s.data = make(map[TData]bool)
}
for _, value := range values {
delete(s.data, value)
}
}
func (s *SyncSet[TData]) Contains(value TData) bool {
s.lock.Lock()
defer s.lock.Unlock()
@@ -66,3 +103,17 @@ func (s *SyncSet[TData]) Get() []TData {
return r
}
// AddIfNotContains
// returns true if the value was actually added (value did not exist beforehand)
// returns false if the value already existed
func (s *SyncSet[TData]) AddIfNotContains(key TData) bool {
return s.Add(key)
}
// RemoveIfContains
// returns true if the value was actually removed (value did exist beforehand)
// returns false if the value did not exist in the set
func (s *SyncSet[TData]) RemoveIfContains(key TData) bool {
return s.Remove(key)
}

View File

@@ -13,6 +13,25 @@ var reflectTypeStr = reflect.TypeOf("")
func FromError(err error) *ExErr {
if err == nil {
// prevent NPE if we call FromError with err==nil
return &ExErr{
UniqueID: newID(),
Category: CatForeign,
Type: TypeInternal,
Severity: SevErr,
Timestamp: time.Time{},
StatusCode: nil,
Message: "",
WrappedErrType: "nil",
WrappedErr: err,
Caller: "",
OriginalError: nil,
Meta: make(MetaMap),
Extra: make(map[string]any),
}
}
//goland:noinspection GoTypeAssertionOnErrors
if verr, ok := err.(*ExErr); ok {
// A simple ExErr
@@ -34,8 +53,8 @@ func FromError(err error) *ExErr {
Caller: "",
OriginalError: nil,
Meta: MetaMap{
"panic_object": {DataType: MDTString, Value: fmt.Sprintf("%+v", verr)},
"panic_type": {DataType: MDTString, Value: fmt.Sprintf("%T", verr)},
"panic_object": {DataType: MDTString, Value: fmt.Sprintf("%+v", verr.RecoveredObj())},
"panic_type": {DataType: MDTString, Value: fmt.Sprintf("%T", verr.RecoveredObj())},
"stack": {DataType: MDTString, Value: verr.Stack},
},
Extra: make(map[string]any),

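FromError previously assumed a non-nil argument; the new branch returns a synthetic ExErr instead, so callers no longer need to nil-check before converting. A minimal sketch (assuming exerr is initialized the way the application normally does):

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

func main() {
	// err == nil no longer causes a nil-pointer panic inside FromError;
	// the result wraps a "nil" error type instead
	ee := exerr.FromError(nil)
	fmt.Println(ee != nil, ee.WrappedErrType) // true nil
}
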
View File

@@ -160,8 +160,8 @@ func (ee *ExErr) FormatLog(lvl LogPrintLevel) string {
for curr := ee; curr != nil; curr = curr.OriginalError {
indent += " "
etype := ee.Type.Key
if ee.Type == TypeWrap {
etype := curr.Type.Key
if curr.Type == TypeWrap {
etype = "~"
}
@@ -370,6 +370,14 @@ func (ee *ExErr) GetExtra(key string) (any, bool) {
return nil, false
}
func (ee *ExErr) UniqueIDs() []string {
ids := []string{ee.UniqueID}
for curr := ee; curr != nil; curr = curr.OriginalError {
ids = append(ids, curr.UniqueID)
}
return ids
}
// contains test if the supplied error is contained in this error (anywhere in the chain)
func (ee *ExErr) contains(original *ExErr) (*ExErr, bool) {
if original == nil {

View File

@@ -100,6 +100,17 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) {
if pctx.body != nil {
if pctx.ginCtx.ContentType() == "application/json" {
if brc, ok := pctx.body.(dataext.BufferedReadCloser); ok {
// Ensures a fully reset (offset=0) buffer before parsing
err := brc.Reset()
if err != nil {
err = exerr.Wrap(err, "Failed to read (brc.reset) json-body").
WithType(exerr.TypeBindFailJSON).
Str("struct_type", fmt.Sprintf("%T", pctx.body)).
Build()
return nil, nil, langext.Ptr(pctx.wrapper.buildRequestBindError(pctx.ginCtx, "JSON", err))
}
}
if err := pctx.ginCtx.ShouldBindJSON(pctx.body); err != nil {
err = exerr.Wrap(err, "Failed to read json-body").
WithType(exerr.TypeBindFailJSON).

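The ginext change resets the BufferedReadCloser before ShouldBindJSON, so a body that was already (partially) read, for example by logging middleware, can still be bound (the v0.0.546 fix). The sketch below is not the goext implementation, only a generic illustration of the underlying "make the request body re-readable" pattern using the standard library:

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
	"strings"
)

// peekBody reads the full body and then restores it, so a later
// ShouldBindJSON-style call can read it again from the start.
func peekBody(r *http.Request) ([]byte, error) {
	raw, err := io.ReadAll(r.Body)
	if err != nil {
		return nil, err
	}
	r.Body = io.NopCloser(bytes.NewReader(raw))
	return raw, nil
}

func main() {
	req, _ := http.NewRequest("POST", "http://example.invalid", strings.NewReader(`{"x":1}`))
	first, _ := peekBody(req)
	second, _ := io.ReadAll(req.Body)
	fmt.Println(string(first) == string(second)) // true
}
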
go.mod (30 changed lines)
View File

@@ -9,34 +9,34 @@ require (
github.com/rs/xid v1.6.0
github.com/rs/zerolog v1.33.0
go.mongodb.org/mongo-driver v1.17.1
golang.org/x/crypto v0.28.0
golang.org/x/sys v0.26.0
golang.org/x/term v0.25.0
golang.org/x/crypto v0.31.0
golang.org/x/sys v0.28.0
golang.org/x/term v0.27.0
)
require (
github.com/disintegration/imaging v1.6.2
github.com/jung-kurt/gofpdf v1.16.2
golang.org/x/sync v0.8.0
golang.org/x/sync v0.10.0
)
require (
github.com/bytedance/sonic v1.12.3 // indirect
github.com/bytedance/sonic/loader v0.2.0 // indirect
github.com/bytedance/sonic v1.12.6 // indirect
github.com/bytedance/sonic/loader v0.2.1 // indirect
github.com/cloudwego/base64x v0.1.4 // indirect
github.com/cloudwego/iasm v0.2.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/gabriel-vasile/mimetype v1.4.5 // indirect
github.com/gabriel-vasile/mimetype v1.4.7 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.22.1 // indirect
github.com/goccy/go-json v0.10.3 // indirect
github.com/go-playground/validator/v10 v10.23.0 // indirect
github.com/goccy/go-json v0.10.4 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/uuid v1.5.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.17.11 // indirect
github.com/klauspost/cpuid/v2 v2.2.8 // indirect
github.com/klauspost/cpuid/v2 v2.2.9 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
@@ -51,11 +51,11 @@ require (
github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
golang.org/x/arch v0.11.0 // indirect
golang.org/x/image v0.21.0 // indirect
golang.org/x/net v0.30.0 // indirect
golang.org/x/text v0.19.0 // indirect
google.golang.org/protobuf v1.35.1 // indirect
golang.org/x/arch v0.12.0 // indirect
golang.org/x/image v0.23.0 // indirect
golang.org/x/net v0.33.0 // indirect
golang.org/x/text v0.21.0 // indirect
google.golang.org/protobuf v1.36.1 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
modernc.org/libc v1.37.6 // indirect
modernc.org/mathutil v1.6.0 // indirect

go.sum (56 changed lines)
View File

@@ -3,9 +3,17 @@ filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4
github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
github.com/bytedance/sonic v1.12.3 h1:W2MGa7RCU1QTeYRTPE3+88mVC0yXmsRQRChiyVocVjU=
github.com/bytedance/sonic v1.12.3/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic v1.12.4 h1:9Csb3c9ZJhfUWeMtpCDCq6BUoH5ogfDFLUgQ/jG+R0k=
github.com/bytedance/sonic v1.12.4/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic v1.12.5 h1:hoZxY8uW+mT+OpkcUWw4k0fDINtOcVavEsGfzwzFU/w=
github.com/bytedance/sonic v1.12.5/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic v1.12.6 h1:/isNmCUF2x3Sh8RAp/4mh4ZGkcFAX/hLrzrK3AvpRzk=
github.com/bytedance/sonic v1.12.6/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.2.0 h1:zNprn+lsIP06C/IqCHs3gPQIvnvpKbbxyXQP1iU4kWM=
github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.2.1 h1:1GgorWTqf12TA8mma4DDSbaQigE2wOgQo7iCjjJv3+E=
github.com/bytedance/sonic/loader v0.2.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
@@ -20,6 +28,10 @@ github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkp
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/gabriel-vasile/mimetype v1.4.5 h1:J7wGKdGu33ocBOhGy0z653k/lFKLFDPJMG8Gql0kxn4=
github.com/gabriel-vasile/mimetype v1.4.5/go.mod h1:ibHel+/kbxn9x2407k1izTA1S81ku1z/DlgOW2QE0M4=
github.com/gabriel-vasile/mimetype v1.4.6 h1:3+PzJTKLkvgjeTbts6msPJt4DixhT4YtFNf1gtGe3zc=
github.com/gabriel-vasile/mimetype v1.4.6/go.mod h1:JX1qVKqZd40hUPpAfiNTe0Sne7hdfKSbOqqmkq8GCXc=
github.com/gabriel-vasile/mimetype v1.4.7 h1:SKFKl7kD0RiPdbht0s7hFtjl489WcQ1VyPW8ZzUMYCA=
github.com/gabriel-vasile/mimetype v1.4.7/go.mod h1:GDlAgAyIRT27BhFl53XNAFtfjzOkLaF35JdEG0P7LtU=
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
@@ -34,10 +46,14 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA=
github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-playground/validator/v10 v10.23.0 h1:/PwmTwZhS0dPkav3cdK9kV1FsAmrL8sThn8IHr/sO+o=
github.com/go-playground/validator/v10 v10.23.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-json v0.10.4 h1:JSwxQzIqKfmFX1swYPpUThQZp/Ka4wzJdK0LWVytLPM=
github.com/goccy/go-json v0.10.4/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
@@ -62,6 +78,8 @@ github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM=
github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/klauspost/cpuid/v2 v2.2.9 h1:66ze0taIn2H33fBvCkXuv9BmCwDfafmiIVpKV9kKGuY=
github.com/klauspost/cpuid/v2 v2.2.9/go.mod h1:rqkxqrZ1EhYM9G+hXH7YdowN5R5RGN6NK4QwQ3WMXF8=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
@@ -125,24 +143,46 @@ go.mongodb.org/mongo-driver v1.17.1 h1:Wic5cJIwJgSpBhe3lx3+/RybR5PiYRMpVFgO7cOHy
go.mongodb.org/mongo-driver v1.17.1/go.mod h1:wwWm/+BuOddhcq3n68LKRmgk2wXzmF6s0SFOa0GINL4=
golang.org/x/arch v0.11.0 h1:KXV8WWKCXm6tRpLirl2szsO5j/oOODwZf4hATmGVNs4=
golang.org/x/arch v0.11.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/arch v0.12.0 h1:UsYJhbzPYGsT0HbEdmYcqtCv8UNGvnaL561NnIUvaKg=
golang.org/x/arch v0.12.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw=
golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U=
golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ=
golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg=
golang.org/x/crypto v0.30.0 h1:RwoQn3GkWiMkzlX562cLB7OxWvjH1L8xutO2WoJcRoY=
golang.org/x/crypto v0.30.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.21.0 h1:c5qV36ajHpdj4Qi0GnE0jUc/yuo33OLFaa0d+crTD5s=
golang.org/x/image v0.21.0/go.mod h1:vUbsLavqK/W303ZroQQVKQ+Af3Yl6Uz1Ppu5J/cLz78=
golang.org/x/image v0.22.0 h1:UtK5yLUzilVrkjMAZAZ34DXGpASN8i8pj8g+O+yd10g=
golang.org/x/image v0.22.0/go.mod h1:9hPFhljd4zZ1GNSIZJ49sqbp45GKK9t6w+iXvGqZUz4=
golang.org/x/image v0.23.0 h1:HseQ7c2OpPKTPVzNjG5fwJsOTCiiwS4QdsYi5XU6H68=
golang.org/x/image v0.23.0/go.mod h1:wJJBTdLfCCf3tiHa1fNxpZmUI4mmoZvwMCPP0ddoNKY=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4=
golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU=
golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo=
golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM=
golang.org/x/net v0.32.0 h1:ZqPmj8Kzc+Y6e0+skZsuACbx+wzMgo5MQsJh9Qd6aYI=
golang.org/x/net v0.32.0/go.mod h1:CwU0IoeOlnQQWJ6ioyFrfRuomB8GKF6KbYXZVyeXNfs=
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ=
golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -154,16 +194,28 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo=
golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s=
golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24=
golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M=
golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU=
golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E=
golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM=
golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug=
golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
@@ -172,6 +224,10 @@ google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6h
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA=
google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io=
google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
google.golang.org/protobuf v1.36.1 h1:yBPeRvTftaleIgM3PZ/WBIZ7XM/eEYAaEyCwvyjq/gk=
google.golang.org/protobuf v1.36.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@@ -1,5 +1,5 @@
package goext
const GoextVersion = "0.0.531"
const GoextVersion = "0.0.550"
const GoextVersionTimestamp = "2024-10-13T16:10:55+0200"
const GoextVersionTimestamp = "2024-12-26T00:23:24+0100"

View File

@@ -401,6 +401,14 @@ func ArrSum[T NumberConstraint](arr []T) T {
return r
}
func ArrMapSum[T1 any, T2 NumberConstraint](arr []T1, conv func(v T1) T2) T2 {
var r T2 = 0
for _, v := range arr {
r += conv(v)
}
return r
}
func ArrFlatten[T1 any, T2 any](arr []T1, conv func(v T1) []T2) []T2 {
r := make([]T2, 0, len(arr))
for _, v1 := range arr {

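ArrMapSum (the v0.0.550 change) sums a value derived from each element in a single pass, i.e. it behaves like ArrSum(ArrMap(...)) without building the intermediate slice. A usage sketch:

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

type item struct {
	Name  string
	Price int
}

func main() {
	cart := []item{{"a", 3}, {"b", 4}}

	// sum a derived numeric value without an intermediate slice
	total := langext.ArrMapSum(cart, func(v item) int { return v.Price })
	fmt.Println(total) // 7
}
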
View File

@@ -4,11 +4,13 @@ import (
"encoding/json"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"strings"
)
type ConvertStructToMapOpt struct {
KeepJsonMarshalTypes bool
MaxDepth *int
UseTagsAsKeys *string
}
func ConvertStructToMap(v any, opts ...ConvertStructToMapOpt) map[string]any {
@@ -90,7 +92,21 @@ func reflectToMap(fv reflect.Value, depth int, opt ConvertStructToMapOpt) any {
for i := 0; i < fv.NumField(); i++ {
if fv.Type().Field(i).IsExported() {
res[fv.Type().Field(i).Name] = reflectToMap(fv.Field(i), depth+1, opt)
k := fv.Type().Field(i).Name
if opt.UseTagsAsKeys != nil {
if tagval, ok := fv.Type().Field(i).Tag.Lookup(*opt.UseTagsAsKeys); ok {
if strings.Contains(tagval, ",") {
k = strings.TrimSpace(strings.Split(tagval, ",")[0])
} else {
k = strings.TrimSpace(tagval)
}
} else {
continue
}
}
res[k] = reflectToMap(fv.Field(i), depth+1, opt)
}
}
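
The new UseTagsAsKeys option makes ConvertStructToMap take its map keys from the named struct tag (only the part before the first comma) and skip fields that do not carry that tag. A sketch of the intended call; the package containing ConvertStructToMap is not visible in this extract, so the structconv import path below is a placeholder:

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"

	// placeholder import path, ConvertStructToMap's real package is not shown in this diff
	structconv "gogs.mikescher.com/BlackForestBytes/goext/structconv"
)

type user struct {
	DisplayName string `json:"displayName"`
	Password    string // no json tag, skipped when UseTagsAsKeys is set
}

func main() {
	m := structconv.ConvertStructToMap(
		user{DisplayName: "mike", Password: "secret"},
		structconv.ConvertStructToMapOpt{UseTagsAsKeys: langext.Ptr("json")},
	)
	fmt.Println(m) // map[displayName:mike]
}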

View File

@@ -120,25 +120,25 @@ func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirecti
valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary)
if err != nil {
return ct.CursorToken{}, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build()
return nil, exerr.Wrap(err, "failed to get (primary) field-value as token-string").Type("lastEntity", lastEntity).Str("fieldPrimary", fieldPrimary).Build()
}
valueSeconary := ""
if fieldSecondary != nil && dirSecondary != nil {
valueSeconary, err = c.getFieldValueAsTokenString(lastEntity, *fieldSecondary)
if err != nil {
return ct.CursorToken{}, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build()
return nil, exerr.Wrap(err, "failed to get (secondary) field-value as token-string").Type("lastEntity", lastEntity).StrPtr("fieldSecondary", fieldSecondary).Build()
}
}
return ct.CursorToken{
Mode: ct.CTMNormal,
ValuePrimary: valuePrimary,
ValueSecondary: valueSeconary,
Direction: dirPrimary,
PageSize: langext.Coalesce(pageSize, 0),
Extra: ct.Extra{},
}, nil
return ct.NewKeySortToken(
valuePrimary,
valueSeconary,
dirPrimary,
dirPrimary,
langext.Coalesce(pageSize, 0),
ct.Extra{},
), nil
}
func (c *Coll[TData]) needsDoubleSort(ctx context.Context) bool {

View File

@@ -10,6 +10,28 @@ import (
)
func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) {
if inTok == nil {
inTok = ct.Start()
}
if ctks, ok := inTok.(ct.CTKeySort); ok {
d, tok, err := c.listWithKSToken(ctx, filter, pageSize, ctks)
if err != nil {
return nil, ct.End(), err
}
return d, tok, nil
} else if ctks, ok := inTok.(ct.CTPaginated); ok {
d, tok, err := c.listWithPaginatedToken(ctx, filter, pageSize, ctks)
if err != nil {
return nil, ct.End(), err
}
return d, tok, nil
} else {
return nil, ct.End(), exerr.New(exerr.TypeCursorTokenDecode, "unknown ct type").Any("token", inTok).Type("tokenType", inTok).Build()
}
}
func (c *Coll[TData]) listWithKSToken(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTKeySort) ([]TData, ct.CursorToken, error) {
if inTok.Mode == ct.CTMEnd {
return make([]TData, 0), ct.End(), nil
}
@@ -41,7 +63,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
paginationPipeline, doubleSortPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
if err != nil {
return nil, ct.CursorToken{}, exerr.
return nil, nil, exerr.
Wrap(err, "failed to create pagination").
WithType(exerr.TypeCursorTokenDecode).
Str("collection", c.Name()).
@@ -66,7 +88,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
cursor, err := c.coll.Aggregate(ctx, pipeline)
if err != nil {
return nil, ct.CursorToken{}, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
return nil, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
defer func() { _ = cursor.Close(ctx) }()
@@ -75,7 +97,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
if pageSize == nil {
entries, err := c.decodeAll(ctx, cursor)
if err != nil {
return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to all-decode entities").Build()
return nil, nil, exerr.Wrap(err, "failed to all-decode entities").Build()
}
return entries, ct.End(), nil
}
@@ -85,7 +107,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
var entry TData
entry, err = c.decodeSingle(ctx, cursor)
if err != nil {
return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to decode entity").Build()
return nil, nil, exerr.Wrap(err, "failed to decode entity").Build()
}
entities = append(entities, entry)
}
@@ -100,12 +122,70 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize)
if err != nil {
return nil, ct.CursorToken{}, exerr.Wrap(err, "failed to create (out)-token").Build()
return nil, nil, exerr.Wrap(err, "failed to create (out)-token").Build()
}
return entities, nextToken, nil
}
func (c *Coll[TData]) listWithPaginatedToken(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CTPaginated) ([]TData, ct.CursorToken, error) {
var err error
page := inTok.Page
if page < 0 {
page = 1
}
pipelineSort := mongo.Pipeline{}
pipelineFilter := mongo.Pipeline{}
if filter != nil {
pipelineFilter = filter.FilterQuery(ctx)
pf1, pd1, pf2, pd2 := filter.Pagination(ctx)
pipelineSort, err = createSortOnlyPipeline(pf1, pd1, &pf2, &pd2)
if err != nil {
return nil, nil, exerr.Wrap(err, "failed to create sort pipeline").Build()
}
}
pipelinePaginate := mongo.Pipeline{}
if pageSize != nil {
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$skip", Value: *pageSize * (page - 1)}})
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$limit", Value: *pageSize}})
} else {
page = 1
}
pipelineCount := mongo.Pipeline{}
pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}})
extrModPipelineResolved := mongo.Pipeline{}
for _, ppl := range c.extraModPipeline {
extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx))
}
pipelineList := langext.ArrConcat(pipelineFilter, pipelineSort, pipelinePaginate, extrModPipelineResolved, pipelineSort)
cursorList, err := c.coll.Aggregate(ctx, pipelineList)
if err != nil {
return nil, nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipelineList).Str("collection", c.Name()).Build()
}
entities, err := c.decodeAll(ctx, cursorList)
if err != nil {
return nil, nil, exerr.Wrap(err, "failed to all-decode entities").Build()
}
tokOut := ct.Page(page + 1)
if pageSize == nil || len(entities) < *pageSize {
tokOut = ct.PageEnd()
}
return entities, tokOut, nil
}
func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, error) {
type countRes struct {
Count int64 `bson:"c"`
@@ -135,17 +215,44 @@ func (c *Coll[TData]) Count(ctx context.Context, filter ct.RawFilter) (int64, er
}
func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, int64, error) {
// NOTE: Possible optimization: Cache count in CursorToken, then fetch count only on first page.
count, err := c.Count(ctx, filter)
if err != nil {
return nil, ct.CursorToken{}, 0, err
}
data, token, err := c.List(ctx, filter, pageSize, inTok)
if err != nil {
return nil, ct.CursorToken{}, 0, err
if pageSize != nil && *pageSize == 0 {
// fast track, we return an empty list and do not advance the cursor token
count, err := c.Count(ctx, filter)
if err != nil {
return nil, nil, 0, err
}
return make([]TData, 0), inTok, count, nil
} else if pageSize == nil && inTok.IsStart() {
// fast track, we simply return len(entries) for count (we query all anyway)
data, token, err := c.List(ctx, filter, pageSize, inTok)
if err != nil {
return nil, nil, 0, err
}
return data, token, int64(len(data)), nil
} else {
count, err := c.Count(ctx, filter)
if err != nil {
return nil, nil, 0, err
}
data, token, err := c.List(ctx, filter, pageSize, inTok)
if err != nil {
return nil, nil, 0, err
}
return data, token, count, nil
}
return data, token, count, nil
}
func (c *Coll[TData]) ListAllIDs(ctx context.Context, filter ct.RawFilter) ([]string, error) {
@@ -184,7 +291,7 @@ func (c *Coll[TData]) ListAllIDs(ctx context.Context, filter ct.RawFilter) ([]st
return langext.ArrMap(res, func(v idObject) string { return v.ID }), nil
}
func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) {
func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CTKeySort, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, []bson.D, error) {
cond := bson.A{}
sort := bson.D{}
@@ -265,3 +372,33 @@ func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken
return pipeline, pipelineSort, nil
}
func createSortOnlyPipeline(fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection) ([]bson.D, error) {
sort := bson.D{}
if sortPrimary == ct.SortASC {
// ASC on the primary field: sort ascending (+1)
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if sortPrimary == ct.SortDESC {
// DESC on the primary field: sort descending (-1)
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {
if *sortSecondary == ct.SortASC {
sort = append(sort, bson.E{Key: *fieldSecondary, Value: +1})
} else if *sortSecondary == ct.SortDESC {
sort = append(sort, bson.E{Key: *fieldSecondary, Value: -1})
}
}
pipelineSort := mongo.Pipeline{bson.D{{Key: "$sort", Value: sort}}}
return pipelineSort, nil
}
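
Putting the pieces together: List treats a nil token as Start (the v0.0.540 commit), dispatches CTKeySort tokens to the old key-sort pipeline and CTPaginated tokens to the new skip/limit pipeline, and ListWithCount with pageSize=0 (v0.0.547) becomes a count-only fast track that does not advance the token. A calling sketch; the wmo package name for the Coll[TData] wrapper is an assumption, since file paths are not shown in this extract:

package example

import (
	"context"

	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"gogs.mikescher.com/BlackForestBytes/goext/wmo" // assumed import path for Coll[TData]
)

type Order struct {
	ID string `bson:"_id"`
}

// listOrders is a calling sketch only (no running mongod, errors ignored).
func listOrders(ctx context.Context, coll *wmo.Coll[Order], filter ct.Filter) {
	// count-only fast track: pageSize=0 returns the total and leaves the token untouched
	_, _, total, _ := coll.ListWithCount(ctx, filter, langext.Ptr(0), ct.Start())
	_ = total

	// second page of 50 entries via the new paginated token;
	// the returned token is ct.Page(3), or ct.PageEnd() if fewer than 50 came back
	entries, next, _ := coll.List(ctx, filter, langext.Ptr(50), ct.Page(2))
	_, _ = entries, next
}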