Compare commits

...

13 Commits

SHA1 | Message | Date | CI: Build Docker and Deploy / Run goext test-suite (push)
bfe62799d3 | v0.0.314 | 2023-11-10 13:37:55 +01:00 | failing after 1m22s
ede912eb7b | v0.0.313 | 2023-11-10 13:26:30 +01:00 | failing after 1m22s
ff8f128fe8 | v0.0.312 improve exerr.RecursiveMessage() | 2023-11-10 10:16:31 +01:00 | failing after 1m23s
1971f1396f | v0.0.311 BF | 2023-11-09 11:48:45 +01:00 | failing after 1m17s
bf6c184d12 | v0.0.310 debug | 2023-11-09 11:40:48 +01:00 | failing after 1m18s
770f5c5c64 | v0.0.309 | 2023-11-09 10:17:29 +01:00 | failing after 1m17s
623c021689 | v0.0.308 | 2023-11-09 10:02:31 +01:00 | failing after 1m20s
afcc89bf9e | v0.0.307 | 2023-11-09 10:00:01 +01:00 | failing after 1m20s
1672e8f8fd | v0.0.306 | 2023-11-09 09:36:41 +01:00 | failing after 1m12s
398ed56d32 | v0.0.305 | 2023-11-09 09:35:56 +01:00 | failing after 1m21s
f3ecba3883 | v0.0.304 add support for WithModifyingPipeline to wmo | 2023-11-09 09:26:46 +01:00 | failing after 1m22s
45031b05cf | v0.0.303 | 2023-11-08 19:01:15 +01:00 | failing after 1m35s
7413ea045d | v0.0.302 | 2023-11-08 18:53:02 +01:00 | failing after 1m41s
21 changed files with 701 additions and 206 deletions

View File

@@ -46,6 +46,7 @@ var (
TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500))
TypeMongoFilter = NewType("MONGO_FILTER", langext.Ptr(500))
TypeMongoReflection = NewType("MONGO_REFLECTION", langext.Ptr(500))
TypeMongoInvalidOpt = NewType("MONGO_INVALIDOPT", langext.Ptr(500))
TypeWrap = NewType("Wrap", nil)

View File

@@ -169,14 +169,32 @@ func (ee *ExErr) ShortLog(evt *zerolog.Event) {
// RecursiveMessage returns the message to show
// = first error (top-down) that is not wrapping/foreign/empty
// = lowest level error (that is not empty)
// = fallback to self.message
func (ee *ExErr) RecursiveMessage() string {
// ==== [1] ==== first error (top-down) that is not wrapping/foreign/empty
for curr := ee; curr != nil; curr = curr.OriginalError {
if curr.Message != "" && curr.Category != CatWrap && curr.Category != CatForeign {
return curr.Message
}
}
// fallback to self
// ==== [2] ==== lowest level error (that is not empty)
deepestMsg := ""
for curr := ee; curr != nil; curr = curr.OriginalError {
if curr.Message != "" {
deepestMsg = curr.Message
}
}
if deepestMsg != "" {
return deepestMsg
}
// ==== [3] ==== fallback to self.message
return ee.Message
}

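The reworked RecursiveMessage resolution order can be illustrated with a minimal sketch. It builds an ExErr chain directly from the exported fields and categories visible in the hunk above (bypassing the usual builder, purely for illustration); the messages themselves are made up:

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)

func main() {
	// outer (wrap, empty message) -> mid (wrap) -> inner (foreign, deepest message)
	inner := &exerr.ExErr{Category: exerr.CatForeign, Message: "connection refused"}
	mid := &exerr.ExErr{Category: exerr.CatWrap, Message: "while loading user", OriginalError: inner}
	outer := &exerr.ExErr{Category: exerr.CatWrap, Message: "", OriginalError: mid}

	// Step [1] finds no non-wrap/non-foreign message, so step [2] now returns the
	// deepest non-empty message instead of falling straight back to outer.Message.
	fmt.Println(outer.RecursiveMessage()) // -> "connection refused"
}
```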
go.mod (4 changed lines)
View File

@@ -8,7 +8,7 @@ require (
github.com/rs/xid v1.5.0
github.com/rs/zerolog v1.31.0
go.mongodb.org/mongo-driver v1.13.0
golang.org/x/crypto v0.14.0
golang.org/x/crypto v0.15.0
golang.org/x/sys v0.14.0
golang.org/x/term v0.14.0
)
@@ -41,7 +41,7 @@ require (
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect
golang.org/x/arch v0.6.0 // indirect
golang.org/x/net v0.17.0 // indirect
golang.org/x/net v0.18.0 // indirect
golang.org/x/sync v0.5.0 // indirect
golang.org/x/text v0.14.0 // indirect
google.golang.org/protobuf v1.31.0 // indirect

go.sum (4 changed lines)
View File

@@ -131,6 +131,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.15.0 h1:frVn1TEaCEaZcn3Tmd7Y2b5KKPaZ+I32Q2OA3kYp5TA=
golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@@ -141,6 +143,8 @@ golang.org/x/net v0.16.0 h1:7eBu7KsSvFDtSXUIDbh3aqlK4DPsZ1rByC8PFfBThos=
golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM=
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg=
golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ=

View File

@@ -1,5 +1,5 @@
package goext
const GoextVersion = "0.0.301"
const GoextVersion = "0.0.314"
const GoextVersionTimestamp = "2023-11-08T18:30:30+0100"
const GoextVersionTimestamp = "2023-11-10T13:37:54+0100"

View File

@@ -18,6 +18,15 @@ func CreateGoExtBsonRegistry() *bsoncodec.Registry {
rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{})
rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{})
rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.UnixTime{}), rfctime.UnixTime{})
rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.UnixTime{}), rfctime.UnixTime{})
rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.UnixMilliTime{}), rfctime.UnixMilliTime{})
rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.UnixMilliTime{}), rfctime.UnixMilliTime{})
rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.UnixNanoTime{}), rfctime.UnixNanoTime{})
rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.UnixNanoTime{}), rfctime.UnixNanoTime{})
rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.Date{}), rfctime.Date{})
rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.Date{}), rfctime.Date{})

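As the rfctime comments further down note, pointer-typed fields of these structs can only be decoded through a registry, not through UnmarshalBSONValue. Below is a sketch of wiring the registry into a client; the mongoext import path is inferred from the module layout and not confirmed by this diff:

```go
package main

import (
	"context"

	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
	"gogs.mikescher.com/BlackForestBytes/goext/mongoext" // assumed import path
)

func connect(ctx context.Context) (*mongo.Client, error) {
	// With the goext registry installed, the pointer decoders registered above
	// (e.g. *rfctime.UnixMilliTime) can turn a BSON null into a nil pointer,
	// which a plain UnmarshalBSONValue implementation cannot do.
	opts := options.Client().
		ApplyURI("mongodb://localhost:27017"). // placeholder URI
		SetRegistry(mongoext.CreateGoExtBsonRegistry())
	return mongo.Connect(ctx, opts)
}
```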
View File

@@ -1,11 +1,28 @@
package pagination
import (
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)
type Filter interface {
FilterQuery() mongo.Pipeline
Pagination() (string, ct.SortDirection)
Sort() bson.D
}
type dynamicFilter struct {
pipeline mongo.Pipeline
sort bson.D
}
func (d dynamicFilter) FilterQuery() mongo.Pipeline {
return d.pipeline
}
func (d dynamicFilter) Sort() bson.D {
return d.sort
}
func CreateFilter(pipeline mongo.Pipeline, sort bson.D) Filter {
return dynamicFilter{pipeline: pipeline, sort: sort}
}

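A usage sketch for the new CreateFilter helper; the field names are illustrative. The resulting value is what gets passed as the pag.Filter argument of Coll.Paginate (changed further down in queryPaginate.go):

```go
package example

import (
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	"gogs.mikescher.com/BlackForestBytes/goext/pagination"
)

// Only active users, newest first; no hand-written Filter implementation is needed anymore.
var activeNewestFirst = pagination.CreateFilter(
	mongo.Pipeline{
		bson.D{{Key: "$match", Value: bson.M{"active": true}}},
	},
	bson.D{{Key: "created", Value: -1}},
)
```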
View File

@@ -2,7 +2,14 @@ package rfctime
import (
"encoding/json"
"errors"
"fmt"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"gogs.mikescher.com/BlackForestBytes/goext/timeext"
"reflect"
"time"
)
@@ -54,6 +61,63 @@ func (d SecondsF64) MarshalJSON() ([]byte, error) {
return json.Marshal(secs)
}
func (d *SecondsF64) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bson.TypeNull {
// we can't set nil in UnmarshalBSONValue (so we fall back to the zero value)
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values
// https://stackoverflow.com/questions/75167597
// https://jira.mongodb.org/browse/GODRIVER-2252
*d = SecondsF64(0)
return nil
}
if bt != bson.TypeDouble {
return errors.New(fmt.Sprintf("cannot unmarshal %v into SecondsF64", bt))
}
var tt float64
err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt)
if err != nil {
return err
}
*d = SecondsF64(tt)
return nil
}
func (d SecondsF64) MarshalBSONValue() (bsontype.Type, []byte, error) {
return bson.MarshalValue(d.Seconds())
}
func (d SecondsF64) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
if val.Kind() == reflect.Ptr && val.IsNil() {
if !val.CanSet() {
return errors.New("ValueUnmarshalerDecodeValue")
}
val.Set(reflect.New(val.Type().Elem()))
}
tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr && len(src) == 0 {
val.Set(reflect.Zero(val.Type()))
return nil
}
err = d.UnmarshalBSONValue(tp, src)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr {
val.Set(reflect.ValueOf(&d))
} else {
val.Set(reflect.ValueOf(d))
}
return nil
}
func NewSecondsF64(t time.Duration) SecondsF64 {
return SecondsF64(t)
}

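The UnmarshalBSONValue/MarshalBSONValue/DecodeValue pattern added here for SecondsF64 is repeated below for UnixTime, UnixMilliTime and UnixNanoTime. A small illustrative model (the struct and field names are made up) of how these types sit in a document:

```go
package example

import "gogs.mikescher.com/BlackForestBytes/goext/rfctime"

type Job struct {
	ID       string             `bson:"_id"`
	Runtime  rfctime.SecondsF64 `bson:"runtime"`  // stored as a BSON double (seconds), per MarshalBSONValue above
	Started  rfctime.UnixTime   `bson:"started"`  // stored as a BSON datetime
	Finished *rfctime.UnixTime  `bson:"finished"` // null -> nil only works via mongoext.CreateGoExtBsonRegistry
}
```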
View File

@@ -2,6 +2,13 @@ package rfctime
import (
"encoding/json"
"errors"
"fmt"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"reflect"
"strconv"
"time"
)
@@ -59,6 +66,63 @@ func (t *UnixTime) UnmarshalText(data []byte) error {
return nil
}
func (t *UnixTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bson.TypeNull {
// we can't set nil in UnmarshalBSONValue (so we fall back to the zero value)
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values
// https://stackoverflow.com/questions/75167597
// https://jira.mongodb.org/browse/GODRIVER-2252
*t = UnixTime{}
return nil
}
if bt != bson.TypeDateTime {
return errors.New(fmt.Sprintf("cannot unmarshal %v into UnixTime", bt))
}
var tt time.Time
err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt)
if err != nil {
return err
}
*t = UnixTime(tt)
return nil
}
func (t UnixTime) MarshalBSONValue() (bsontype.Type, []byte, error) {
return bson.MarshalValue(time.Time(t))
}
func (t UnixTime) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
if val.Kind() == reflect.Ptr && val.IsNil() {
if !val.CanSet() {
return errors.New("ValueUnmarshalerDecodeValue")
}
val.Set(reflect.New(val.Type().Elem()))
}
tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr && len(src) == 0 {
val.Set(reflect.Zero(val.Type()))
return nil
}
err = t.UnmarshalBSONValue(tp, src)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr {
val.Set(reflect.ValueOf(&t))
} else {
val.Set(reflect.ValueOf(t))
}
return nil
}
func (t UnixTime) Serialize() string {
return strconv.FormatInt(t.Time().Unix(), 10)
}

View File

@@ -2,6 +2,13 @@ package rfctime
import (
"encoding/json"
"errors"
"fmt"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"reflect"
"strconv"
"time"
)
@@ -59,6 +66,63 @@ func (t *UnixMilliTime) UnmarshalText(data []byte) error {
return nil
}
func (t *UnixMilliTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bson.TypeNull {
// we can't set nil in UnmarshalBSONValue (so we fall back to the zero value)
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values
// https://stackoverflow.com/questions/75167597
// https://jira.mongodb.org/browse/GODRIVER-2252
*t = UnixMilliTime{}
return nil
}
if bt != bson.TypeDateTime {
return errors.New(fmt.Sprintf("cannot unmarshal %v into UnixMilliTime", bt))
}
var tt time.Time
err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt)
if err != nil {
return err
}
*t = UnixMilliTime(tt)
return nil
}
func (t UnixMilliTime) MarshalBSONValue() (bsontype.Type, []byte, error) {
return bson.MarshalValue(time.Time(t))
}
func (t UnixMilliTime) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
if val.Kind() == reflect.Ptr && val.IsNil() {
if !val.CanSet() {
return errors.New("ValueUnmarshalerDecodeValue")
}
val.Set(reflect.New(val.Type().Elem()))
}
tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr && len(src) == 0 {
val.Set(reflect.Zero(val.Type()))
return nil
}
err = t.UnmarshalBSONValue(tp, src)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr {
val.Set(reflect.ValueOf(&t))
} else {
val.Set(reflect.ValueOf(t))
}
return nil
}
func (t UnixMilliTime) Serialize() string {
return strconv.FormatInt(t.Time().UnixMilli(), 10)
}

View File

@@ -2,6 +2,13 @@ package rfctime
import (
"encoding/json"
"errors"
"fmt"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"reflect"
"strconv"
"time"
)
@@ -59,6 +66,63 @@ func (t *UnixNanoTime) UnmarshalText(data []byte) error {
return nil
}
func (t *UnixNanoTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bson.TypeNull {
// we can't set nil in UnmarshalBSONValue (so we fall back to the zero value)
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values
// https://stackoverflow.com/questions/75167597
// https://jira.mongodb.org/browse/GODRIVER-2252
*t = UnixNanoTime{}
return nil
}
if bt != bson.TypeDateTime {
return errors.New(fmt.Sprintf("cannot unmarshal %v into UnixNanoTime", bt))
}
var tt time.Time
err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt)
if err != nil {
return err
}
*t = UnixNanoTime(tt)
return nil
}
func (t UnixNanoTime) MarshalBSONValue() (bsontype.Type, []byte, error) {
return bson.MarshalValue(time.Time(t))
}
func (t UnixNanoTime) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
if val.Kind() == reflect.Ptr && val.IsNil() {
if !val.CanSet() {
return errors.New("ValueUnmarshalerDecodeValue")
}
val.Set(reflect.New(val.Type().Elem()))
}
tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr && len(src) == 0 {
val.Set(reflect.Zero(val.Type()))
return nil
}
err = t.UnmarshalBSONValue(tp, src)
if err != nil {
return err
}
if val.Kind() == reflect.Ptr {
val.Set(reflect.ValueOf(&t))
} else {
val.Set(reflect.ValueOf(t))
}
return nil
}
func (t UnixNanoTime) Serialize() string {
return strconv.FormatInt(t.Time().UnixNano(), 10)
}

View File

@@ -45,6 +45,7 @@ type Coll[TData any] struct {
customDecoder *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface)
isInterfaceDataType bool // true if TData is an interface (not a struct)
unmarshalHooks []func(d TData) TData // called for every object after unmarshalling
extraModPipeline mongo.Pipeline // appended to pipelines after filter/limit/skip/sort, used for $lookup, $set, $unset, $project, etc
}
func (c *Coll[TData]) Collection() *mongo.Collection {
@@ -81,6 +82,12 @@ func (c *Coll[TData]) WithUnmarshalHook(fn func(d TData) TData) *Coll[TData] {
return c
}
func (c *Coll[TData]) WithModifyingPipeline(p mongo.Pipeline) *Coll[TData] {
c.extraModPipeline = append(c.extraModPipeline, p...)
return c
}
func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) {
valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary)

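A sketch of the new WithModifyingPipeline hook. How the *wmo.Coll[User] itself is constructed is not part of this diff, and the User/companies names are assumptions; the appended stages then run after filter/limit/skip/sort in Find, FindOne*, Aggregate*, List and Paginate, as the changes below show:

```go
package example

import (
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	"gogs.mikescher.com/BlackForestBytes/goext/wmo" // import path inferred from the repo layout
)

// User is an illustrative document type.
type User struct {
	ID        string `bson:"_id"`
	CompanyID string `bson:"companyId"`
	Company   bson.M `bson:"company"` // filled in by the $lookup below
}

func withCompanyJoin(coll *wmo.Coll[User]) *wmo.Coll[User] {
	return coll.WithModifyingPipeline(mongo.Pipeline{
		// join the referenced company document into every returned User
		bson.D{{Key: "$lookup", Value: bson.M{
			"from":         "companies",
			"localField":   "companyId",
			"foreignField": "_id",
			"as":           "company",
		}}},
		// $lookup yields an array; keep only the first element (MongoDB >= 4.4)
		bson.D{{Key: "$set", Value: bson.M{"company": bson.M{"$first": "$company"}}}},
	})
}
```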
View File

@@ -2,76 +2,88 @@ package wmo
import (
"context"
"go.mongodb.org/mongo-driver/bson"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)
func (c *Coll[TData]) decodeSingle(ctx context.Context, dec Decodable) (TData, error) {
if c.customDecoder != nil {
res, err := (*c.customDecoder)(ctx, dec)
var res TData
var err error
if c.customDecoder != nil {
res, err = (*c.customDecoder)(ctx, dec)
if err != nil {
return *new(TData), exerr.Wrap(err, "failed to decode single entity with custom-decoder").Type("decoder", *c.customDecoder).Build()
}
for _, hook := range c.unmarshalHooks {
res = hook(res)
}
return res, nil
} else {
var res TData
err := dec.Decode(&res)
err = dec.Decode(&res)
if err != nil {
return *new(TData), exerr.Wrap(err, "failed to decode single entity").Type("target-type", res).Build()
}
for _, hook := range c.unmarshalHooks {
res = hook(res)
}
return res, nil
}
for _, hook := range c.unmarshalHooks {
res = hook(res)
}
return res, nil
}
func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData, error) {
res := make([]TData, 0, cursor.RemainingBatchLength())
if c.customDecoder != nil {
res := make([]TData, 0, cursor.RemainingBatchLength())
for cursor.Next(ctx) {
entry, err := (*c.customDecoder)(ctx, cursor)
if err != nil {
return nil, exerr.Wrap(err, "failed to decode entity with custom-decoder").Type("decoder", *c.customDecoder).Build()
}
for _, hook := range c.unmarshalHooks {
entry = hook(entry)
}
res = append(res, entry)
}
return res, nil
} else {
res := make([]TData, 0, cursor.RemainingBatchLength())
err := cursor.All(ctx, &res)
if err != nil {
return nil, exerr.Wrap(err, "failed to batch-decode entity").Type("target-type", res).Build()
}
for i := 0; i < len(res); i++ {
for _, hook := range c.unmarshalHooks {
res[i] = hook(res[i])
}
}
return res, nil
}
for i := 0; i < len(res); i++ {
for _, hook := range c.unmarshalHooks {
res[i] = hook(res[i])
}
}
return res, nil
}
func (c *Coll[TData]) decodeSingleOrRequery(ctx context.Context, dec Decodable) (TData, error) {
if c.extraModPipeline == nil {
// simple case, we can just decode the result and return it
return c.decodeSingle(ctx, dec)
} else {
// annoying case, we have an extraModPipeline and need to re-query the document so that the extraModPipeline is applied...
type genDoc struct {
ID any `bson:"_id"`
}
var res genDoc
err := dec.Decode(&res)
if err != nil {
return *new(TData), exerr.Wrap(err, "failed to ID-decode entity").Build()
}
v, err := c.findOneInternal(ctx, bson.M{"_id": res.ID}, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "failed to re-query entity").Any("_id", res.ID).Build()
}
return *v, nil
}
}

View File

@@ -1,87 +0,0 @@
package wmo
import (
"go.mongodb.org/mongo-driver/bson"
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
)
func CreatePagination[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) {
cond := bson.A{}
sort := bson.D{}
valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary)
if err != nil {
return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build()
}
if sortPrimary == ct.SortASC {
// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary
cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if sortPrimary == ct.SortDESC {
// We sort DESC on <field> - so we want all entries older ($lt) than the $primary
cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {
valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary)
if err != nil {
return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build()
}
if *sortSecondary == ct.SortASC {
// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}},
}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if *sortSecondary == ct.SortDESC {
// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}},
}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
}
pipeline := make([]bson.D, 0, 3)
if token.Mode == ct.CTMStart {
// no gt/lt condition
} else if token.Mode == ct.CTMNormal {
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}})
} else if token.Mode == ct.CTMEnd {
// false
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}})
} else {
return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build()
}
pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}})
if pageSize != nil {
pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}})
}
return pipeline, nil
}

View File

@@ -5,9 +5,13 @@ import (
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) {
pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
if err != nil {
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
@@ -22,6 +26,9 @@ func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, op
}
func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) {
pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
if err != nil {
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()
@@ -39,6 +46,9 @@ func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeli
}
func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) {
pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Any("options", opts).Str("collection", c.Name()).Build()

View File

@@ -2,69 +2,56 @@ package wmo
import (
"context"
"errors"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) {
mongoRes := c.coll.FindOne(ctx, filter)
if err := mongoRes.Err(); err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").
Str("collection", c.Name()).
Any("filter", filter).
Build()
}
return c.decodeSingle(ctx, mongoRes)
}
func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) {
mongoRes := c.coll.FindOne(ctx, filter)
res, err := c.decodeSingle(ctx, mongoRes)
if errors.Is(err, mongo.ErrNoDocuments) {
return nil, nil
}
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Any("filter", filter).Str("collection", c.Name()).Build()
}
return &res, nil
}
func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) {
mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id})
if err := mongoRes.Err(); err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one-by-id] failed").
Str("collection", c.Name()).
Id("id", id).
Build()
}
return c.decodeSingle(ctx, mongoRes)
}
func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) {
mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id})
res, err := c.decodeSingle(ctx, mongoRes)
if errors.Is(err, mongo.ErrNoDocuments) {
return nil, nil
}
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one-opt-by-id] failed").Id("id", id).Str("collection", c.Name()).Build()
}
return &res, nil
}
func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) {
cursor, err := c.coll.Find(ctx, filter, opts...)
pipeline := mongo.Pipeline{}
pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}})
for _, opt := range opts {
if opt != nil && opt.Sort != nil {
pipeline = append(pipeline, bson.D{{Key: "$sort", Value: opt.Sort}})
}
}
for _, opt := range opts {
if opt != nil && opt.Skip != nil {
pipeline = append(pipeline, bson.D{{Key: "$skip", Value: *opt.Skip}})
}
}
for _, opt := range opts {
if opt != nil && opt.Limit != nil {
pipeline = append(pipeline, bson.D{{Key: "$limit", Value: *opt.Limit}})
}
}
pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
for _, opt := range opts {
if opt != nil && opt.Projection != nil {
pipeline = append(pipeline, bson.D{{Key: "$project", Value: opt.Projection}})
}
}
convOpts := make([]*options.AggregateOptions, 0, len(opts))
for _, v := range opts {
vConv, err := convertFindOpt(v)
if err != nil {
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
convOpts = append(convOpts, vConv)
}
cursor, err := c.coll.Aggregate(ctx, pipeline, convOpts...)
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Any("filter", filter).Any("opts", opts).Str("collection", c.Name()).Build()
return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
res, err := c.decodeAll(ctx, cursor)
@@ -74,3 +61,66 @@ func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.
return res, nil
}
// converts FindOptions to AggregateOptions
func convertFindOpt(v *options.FindOptions) (*options.AggregateOptions, error) {
if v == nil {
return nil, nil
}
r := options.Aggregate()
if v.AllowDiskUse != nil {
r.SetAllowDiskUse(*v.AllowDiskUse)
}
if v.AllowPartialResults != nil {
return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'AllowPartialResults' (cannot convert to AggregateOptions)").Build()
}
if v.BatchSize != nil {
r.SetBatchSize(*v.BatchSize)
}
if v.Collation != nil {
r.SetCollation(v.Collation)
}
if v.Comment != nil {
r.SetComment(*v.Comment)
}
if v.CursorType != nil {
return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'CursorType' (cannot convert to AggregateOptions)").Build()
}
if v.Hint != nil {
r.SetHint(v.Hint)
}
if v.Max != nil {
return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'Max' (cannot convert to AggregateOptions)").Build()
}
if v.MaxAwaitTime != nil {
r.SetMaxAwaitTime(*v.MaxAwaitTime)
}
if v.MaxTime != nil {
r.SetMaxTime(*v.MaxTime)
}
if v.Min != nil {
return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'Min' (cannot convert to AggregateOptions)").Build()
}
if v.NoCursorTimeout != nil {
return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'NoCursorTimeout' (cannot convert to AggregateOptions)").Build()
}
if v.OplogReplay != nil {
return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'OplogReplay' (cannot convert to AggregateOptions)").Build()
}
if v.ReturnKey != nil {
return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'ReturnKey' (cannot convert to AggregateOptions)").Build()
}
if v.ShowRecordID != nil {
return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'ShowRecordID' (cannot convert to AggregateOptions)").Build()
}
if v.Snapshot != nil {
return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'Snapshot' (cannot convert to AggregateOptions)").Build()
}
if v.Let != nil {
r.SetLet(v.Let)
}
return r, nil
}

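Since Find now runs as an aggregation, the caller's FindOptions are converted via convertFindOpt. A hedged call sketch, continuing the example package from the WithModifyingPipeline sketch above (field names are illustrative):

```go
package example

import (
	"context"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo/options"
	"gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

func activeUsers(ctx context.Context, coll *wmo.Coll[User]) ([]User, error) {
	// Executed as: $match -> $sort -> $skip -> $limit -> extraModPipeline -> $project.
	// An option with no aggregation equivalent (e.g. SetAllowPartialResults(true))
	// would instead fail via convertFindOpt with the new MONGO_INVALIDOPT error type.
	return coll.Find(ctx, bson.M{"active": true},
		options.Find().
			SetSort(bson.D{{Key: "created", Value: -1}}).
			SetLimit(50))
}
```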
wmo/queryFindOne.go (new file, 93 lines)
View File

@@ -0,0 +1,93 @@
package wmo
import (
"context"
"errors"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
)
func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) {
r, err := c.findOneInternal(ctx, filter, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one] failed").Str("collection", c.Name()).Build()
}
return *r, nil
}
func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) {
r, err := c.findOneInternal(ctx, filter, true)
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one-opt] failed").Str("collection", c.Name()).Build()
}
return r, nil
}
func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) {
r, err := c.findOneInternal(ctx, bson.M{"_id": id}, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[find-one-by-id] failed").Id("id", id).Str("collection", c.Name()).Build()
}
return *r, nil
}
func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) {
r, err := c.findOneInternal(ctx, bson.M{"_id": id}, true)
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one-opt-by-id] failed").Id("id", id).Str("collection", c.Name()).Build()
}
return r, nil
}
func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowNull bool) (*TData, error) {
if len(c.extraModPipeline) == 0 {
// simple case, use mongo FindOne
mongoRes := c.coll.FindOne(ctx, filter)
res, err := c.decodeSingle(ctx, mongoRes)
if allowNull && errors.Is(err, mongo.ErrNoDocuments) {
return nil, nil
}
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[find-one] failed").Any("filter", filter).Str("collection", c.Name()).Build()
}
return &res, nil
} else {
// complex case, we have one or more additional pipeline stages, convert to aggregation
pipeline := mongo.Pipeline{}
pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}})
pipeline = append(pipeline, bson.D{{Key: "$limit", Value: 1}})
pipeline = langext.ArrConcat(pipeline, c.extraModPipeline)
cursor, err := c.coll.Aggregate(ctx, pipeline)
if err != nil {
return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
if cursor.Next(ctx) {
v, err := c.decodeSingle(ctx, cursor)
if err != nil {
return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
return &v, nil
} else if allowNull {
return nil, nil
} else {
return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build()
}
}
}

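With a non-empty extraModPipeline, the FindOne* helpers above switch from a plain coll.FindOne to a $match + $limit:1 aggregation so the extra stages are applied. A brief sketch, again continuing the example package from above:

```go
package example

import (
	"context"

	"gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

func loadUser(ctx context.Context, coll *wmo.Coll[User], id wmo.EntityID) (User, error) {
	// Runs $match -> $limit:1 -> extraModPipeline via Aggregate, so the $lookup/$set
	// stages from withCompanyJoin are applied to the returned document.
	// (FindOneOptByID would return nil, nil instead of an error when nothing matches.)
	return coll.FindOneByID(ctx, id)
}
```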
View File

@@ -14,9 +14,25 @@ func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, erro
return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build()
}
mongoRes := c.coll.FindOne(ctx, bson.M{"_id": insRes.InsertedID})
r, err := c.findOneInternal(ctx, bson.M{"_id": insRes.InsertedID}, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build()
}
return *r, nil
}
return c.decodeSingle(ctx, mongoRes)
// InsertOneUnchecked behaves the same as InsertOne, but allows arbitrary data to be inserted (valueIn is any instead of TData)
func (c *Coll[TData]) InsertOneUnchecked(ctx context.Context, valueIn any) (TData, error) {
insRes, err := c.coll.InsertOne(ctx, valueIn)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build()
}
r, err := c.findOneInternal(ctx, bson.M{"_id": insRes.InsertedID}, false)
if err != nil {
return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build()
}
return *r, nil
}
func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) {
@@ -27,3 +43,13 @@ func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.I
return insRes, nil
}
// InsertManyUnchecked behaves the same as InsertMany, but allows arbitrary data to be inserted (valueIn is []any instead of []TData)
func (c *Coll[TData]) InsertManyUnchecked(ctx context.Context, valueIn []any) (*mongo.InsertManyResult, error) {
insRes, err := c.coll.InsertMany(ctx, langext.ArrayToInterface(valueIn))
if err != nil {
return nil, exerr.Wrap(err, "mongo-query[insert-many] failed").Int("len(valueIn)", len(valueIn)).Str("collection", c.Name()).Build()
}
return insRes, nil
}

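A brief sketch of the new unchecked insert, continuing the example package from above; the document content is illustrative. The inserted document is read back by _id through findOneInternal, so a configured extraModPipeline is applied to the returned value as well:

```go
package example

import (
	"context"

	"go.mongodb.org/mongo-driver/bson"
	"gogs.mikescher.com/BlackForestBytes/goext/wmo"
)

func seedDemoUser(ctx context.Context, coll *wmo.Coll[User]) (User, error) {
	// The document deliberately bypasses the collection's TData type (useful for
	// migrations or tests); validity is the caller's responsibility.
	return coll.InsertOneUnchecked(ctx, bson.M{
		"_id":    "usr_0001", // illustrative values
		"name":   "demo",
		"active": true,
	})
}
```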
View File

@@ -34,7 +34,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
sortDirSecondary = nil
}
paginationPipeline, err := CreatePagination(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
paginationPipeline, err := createPaginationPipeline(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
if err != nil {
return nil, ct.CursorToken{}, exerr.
Wrap(err, "failed to create pagination").
@@ -50,6 +50,7 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int,
}
pipeline = append(pipeline, paginationPipeline...)
pipeline = append(pipeline, c.extraModPipeline...)
cursor, err := c.coll.Aggregate(ctx, pipeline)
if err != nil {
@@ -130,3 +131,83 @@ func (c *Coll[TData]) ListWithCount(ctx context.Context, filter ct.Filter, pageS
}
return data, token, count, nil
}
func createPaginationPipeline[TData any](coll *Coll[TData], token ct.CursorToken, fieldPrimary string, sortPrimary ct.SortDirection, fieldSecondary *string, sortSecondary *ct.SortDirection, pageSize *int) ([]bson.D, error) {
cond := bson.A{}
sort := bson.D{}
valuePrimary, err := coll.getTokenValueAsMongoType(token.ValuePrimary, fieldPrimary)
if err != nil {
return nil, exerr.Wrap(err, "failed to get (primary) token-value as mongo-type").Build()
}
if sortPrimary == ct.SortASC {
// We sort ASC on <field> - so we want all entries newer ($gt) than the $primary
cond = append(cond, bson.M{fieldPrimary: bson.M{"$gt": valuePrimary}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if sortPrimary == ct.SortDESC {
// We sort DESC on <field> - so we want all entries older ($lt) than the $primary
cond = append(cond, bson.M{fieldPrimary: bson.M{"$lt": valuePrimary}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
if fieldSecondary != nil && sortSecondary != nil && *fieldSecondary != fieldPrimary {
valueSecondary, err := coll.getTokenValueAsMongoType(token.ValueSecondary, *fieldSecondary)
if err != nil {
return nil, exerr.Wrap(err, "failed to get (secondary) token-value as mongo-type").Build()
}
if *sortSecondary == ct.SortASC {
// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a greater $secondary (= newer)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{*fieldSecondary: bson.M{"$gt": valueSecondary}},
}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: +1})
} else if *sortSecondary == ct.SortDESC {
// the conflict-resolution condition, for entries with the _same_ <field> as the $primary we take the ones with a smaller $secondary (= older)
cond = append(cond, bson.M{"$and": bson.A{
bson.M{fieldPrimary: valuePrimary},
bson.M{*fieldSecondary: bson.M{"$lt": valueSecondary}},
}})
sort = append(sort, bson.E{Key: fieldPrimary, Value: -1})
}
}
pipeline := make([]bson.D, 0, 3)
if token.Mode == ct.CTMStart {
// no gt/lt condition
} else if token.Mode == ct.CTMNormal {
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}})
} else if token.Mode == ct.CTMEnd {
// always-false match: CTMEnd intentionally returns no results
pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}})
} else {
return nil, exerr.New(exerr.TypeInternal, "unknown ct mode: "+string(token.Mode)).Any("token.Mode", token.Mode).Build()
}
pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}})
if pageSize != nil {
pipeline = append(pipeline, bson.D{{Key: "$limit", Value: int64(*pageSize + 1)}})
}
return pipeline, nil
}

View File

@@ -4,7 +4,6 @@ import (
"context"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
"gogs.mikescher.com/BlackForestBytes/goext/exerr"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
pag "gogs.mikescher.com/BlackForestBytes/goext/pagination"
@@ -21,30 +20,29 @@ func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.Filter, page int,
pipelineSort := mongo.Pipeline{}
pipelineFilter := mongo.Pipeline{}
pf1 := "_id"
pd1 := ct.SortASC
sort := bson.D{}
if filter != nil {
pipelineFilter = filter.FilterQuery()
pf1, pd1 = filter.Pagination()
sort = filter.Sort()
}
if pd1 == ct.SortASC {
pipelineSort = append(pipelineSort, bson.D{{Key: "$sort", Value: bson.D{{Key: pf1, Value: +1}}}})
} else if pd1 == ct.SortDESC {
pipelineSort = append(pipelineSort, bson.D{{Key: "$sort", Value: bson.D{{Key: pf1, Value: -1}}}})
if len(sort) != 0 {
pipelineSort = append(pipelineSort, bson.D{{Key: "$sort", Value: sort}})
}
pipelinePaginate := mongo.Pipeline{}
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$skip", Value: page - 1}})
if limit != nil {
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$skip", Value: *limit * (page - 1)}})
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$limit", Value: *limit}})
} else {
page = 1
}
pipelineCount := mongo.Pipeline{}
pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$count", Value: "count"}})
pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}})
pipelineList := langext.ArrConcat(mongo.Pipeline{}, pipelineFilter, pipelinePaginate)
pipelineList := langext.ArrConcat(mongo.Pipeline{}, pipelineFilter, pipelinePaginate, c.extraModPipeline)
pipelineTotalCount := langext.ArrConcat(mongo.Pipeline{}, pipelineFilter, pipelineCount)
cursorList, err := c.coll.Aggregate(ctx, pipelineList)

View File

@@ -18,7 +18,7 @@ func (c *Coll[TData]) FindOneAndUpdate(ctx context.Context, filterQuery bson.M,
Build()
}
return c.decodeSingle(ctx, mongoRes)
return c.decodeSingleOrRequery(ctx, mongoRes)
}
func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQuery bson.M) error {
@@ -81,5 +81,5 @@ func (c *Coll[TData]) FindOneAndReplace(ctx context.Context, filterQuery bson.M,
Build()
}
return c.decodeSingle(ctx, mongoRes)
return c.decodeSingleOrRequery(ctx, mongoRes)
}