Compare commits
	
		
			21 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 8f15d42173 | |||
| 07fa21dcca | |||
| e657de7f78 | |||
| c534e998e8 | |||
| 88642770c5 | |||
| 8528b5cb66 | |||
| 5ba84bd8ee | |||
| 1260b2dc77 | |||
| 7d18b913c6 | |||
| d1f9069f2f | |||
| fa6d73301e | |||
| bfe62799d3 | |||
| ede912eb7b | |||
| ff8f128fe8 | |||
| 1971f1396f | |||
| bf6c184d12 | |||
| 770f5c5c64 | |||
| 623c021689 | |||
| afcc89bf9e | |||
| 1672e8f8fd | |||
| 398ed56d32 | 
| @@ -6,7 +6,12 @@ | |||||||
| name: Build Docker and Deploy | name: Build Docker and Deploy | ||||||
| run-name: Build & Deploy ${{ gitea.ref }} on ${{ gitea.actor }} | run-name: Build & Deploy ${{ gitea.ref }} on ${{ gitea.actor }} | ||||||
|  |  | ||||||
| on: [push] | on: | ||||||
|  |   push: | ||||||
|  |     branches: | ||||||
|  |       - '*' | ||||||
|  |       - '**' | ||||||
|  |  | ||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   run_tests: |   run_tests: | ||||||
| @@ -34,3 +39,17 @@ jobs: | |||||||
|       - name: Run tests |       - name: Run tests | ||||||
|         run: cd "${{ gitea.workspace }}" && make test |         run: cd "${{ gitea.workspace }}" && make test | ||||||
|  |  | ||||||
|  |       - name: Send failure mail | ||||||
|  |         if: failure() | ||||||
|  |         uses: dawidd6/action-send-mail@v3 | ||||||
|  |         with: | ||||||
|  |           server_address: smtp.fastmail.com | ||||||
|  |           server_port: 465 | ||||||
|  |           secure: true | ||||||
|  |           username: ${{secrets.MAIL_USERNAME}} | ||||||
|  |           password: ${{secrets.MAIL_PASSWORD}} | ||||||
|  |           subject: Pipeline on '${{ gitea.repository }}' failed | ||||||
|  |           to: ${{ steps.commiter_info.outputs.MAIL }} | ||||||
|  |           from: Gitea Actions <gitea_actions@blackforestbytes.de> | ||||||
|  |           body: "Go to https://gogs.blackforestbytes.com/${{ gitea.repository }}/actions" | ||||||
|  |  | ||||||
|   | |||||||
| @@ -37,7 +37,7 @@ var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_] | |||||||
|  |  | ||||||
| var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`)) | ||||||
|  |  | ||||||
| var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s]+"|[0-9]+))\s*(//(?P<descr>.*))?.*$`)) | var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-]+"|[0-9]+))\s*(//(?P<descr>.*))?.*$`)) | ||||||
|  |  | ||||||
| var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`)) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -70,6 +70,7 @@ func init() { | |||||||
| type Builder struct { | type Builder struct { | ||||||
| 	errorData       *ExErr | 	errorData       *ExErr | ||||||
| 	containsGinData bool | 	containsGinData bool | ||||||
|  | 	noLog           bool | ||||||
| } | } | ||||||
|  |  | ||||||
| func Get(err error) *Builder { | func Get(err error) *Builder { | ||||||
| @@ -190,6 +191,13 @@ func (b *Builder) System() *Builder { | |||||||
|  |  | ||||||
| // ---------------------------------------------------------------------------- | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
|  | func (b *Builder) NoLog() *Builder { | ||||||
|  | 	b.noLog = true | ||||||
|  | 	return b | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // ---------------------------------------------------------------------------- | ||||||
|  |  | ||||||
| func (b *Builder) Id(key string, val fmt.Stringer) *Builder { | func (b *Builder) Id(key string, val fmt.Stringer) *Builder { | ||||||
| 	return b.addMeta(key, MDTID, newIDWrap(val)) | 	return b.addMeta(key, MDTID, newIDWrap(val)) | ||||||
| } | } | ||||||
| @@ -401,12 +409,14 @@ func extractHeader(header map[string][]string) []string { | |||||||
|  |  | ||||||
| // Build creates a new error, ready to pass up the stack | // Build creates a new error, ready to pass up the stack | ||||||
| // If the errors is not SevWarn or SevInfo it gets also logged (in short form, without stacktrace) onto stdout | // If the errors is not SevWarn or SevInfo it gets also logged (in short form, without stacktrace) onto stdout | ||||||
|  | // Can be globally configured with ZeroLogErrTraces and ZeroLogAllTraces | ||||||
|  | // Can be locally suppressed with Builder.NoLog() | ||||||
| func (b *Builder) Build() error { | func (b *Builder) Build() error { | ||||||
| 	warnOnPkgConfigNotInitialized() | 	warnOnPkgConfigNotInitialized() | ||||||
|  |  | ||||||
| 	if pkgconfig.ZeroLogErrTraces && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) { | 	if pkgconfig.ZeroLogErrTraces && !b.noLog && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) { | ||||||
| 		b.errorData.ShortLog(stackSkipLogger.Error()) | 		b.errorData.ShortLog(stackSkipLogger.Error()) | ||||||
| 	} else if pkgconfig.ZeroLogAllTraces { | 	} else if pkgconfig.ZeroLogAllTraces && !b.noLog { | ||||||
| 		b.errorData.ShortLog(stackSkipLogger.Error()) | 		b.errorData.ShortLog(stackSkipLogger.Error()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|   | |||||||
| @@ -46,6 +46,7 @@ var ( | |||||||
| 	TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500)) | 	TypeCursorTokenDecode = NewType("CURSOR_TOKEN_DECODE", langext.Ptr(500)) | ||||||
| 	TypeMongoFilter       = NewType("MONGO_FILTER", langext.Ptr(500)) | 	TypeMongoFilter       = NewType("MONGO_FILTER", langext.Ptr(500)) | ||||||
| 	TypeMongoReflection   = NewType("MONGO_REFLECTION", langext.Ptr(500)) | 	TypeMongoReflection   = NewType("MONGO_REFLECTION", langext.Ptr(500)) | ||||||
|  | 	TypeMongoInvalidOpt   = NewType("MONGO_INVALIDOPT", langext.Ptr(500)) | ||||||
|  |  | ||||||
| 	TypeWrap = NewType("Wrap", nil) | 	TypeWrap = NewType("Wrap", nil) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -68,6 +68,10 @@ func Init(cfg ErrorPackageConfigInit) { | |||||||
| 	initialized = true | 	initialized = true | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func Initialized() bool { | ||||||
|  | 	return initialized | ||||||
|  | } | ||||||
|  |  | ||||||
| func warnOnPkgConfigNotInitialized() { | func warnOnPkgConfigNotInitialized() { | ||||||
| 	if !initialized { | 	if !initialized { | ||||||
| 		fmt.Printf("\n") | 		fmt.Printf("\n") | ||||||
|   | |||||||
| @@ -169,14 +169,32 @@ func (ee *ExErr) ShortLog(evt *zerolog.Event) { | |||||||
|  |  | ||||||
| // RecursiveMessage returns the message to show | // RecursiveMessage returns the message to show | ||||||
| // = first error (top-down) that is not wrapping/foreign/empty | // = first error (top-down) that is not wrapping/foreign/empty | ||||||
|  | // = lowest level error (that is not empty) | ||||||
|  | // = fallback to self.message | ||||||
| func (ee *ExErr) RecursiveMessage() string { | func (ee *ExErr) RecursiveMessage() string { | ||||||
|  |  | ||||||
|  | 	// ==== [1] ==== first error (top-down) that is not wrapping/foreign/empty | ||||||
|  |  | ||||||
| 	for curr := ee; curr != nil; curr = curr.OriginalError { | 	for curr := ee; curr != nil; curr = curr.OriginalError { | ||||||
| 		if curr.Message != "" && curr.Category != CatWrap && curr.Category != CatForeign { | 		if curr.Message != "" && curr.Category != CatWrap && curr.Category != CatForeign { | ||||||
| 			return curr.Message | 			return curr.Message | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	// fallback to self | 	// ==== [2] ==== lowest level error (that is not empty) | ||||||
|  |  | ||||||
|  | 	deepestMsg := "" | ||||||
|  | 	for curr := ee; curr != nil; curr = curr.OriginalError { | ||||||
|  | 		if curr.Message != "" { | ||||||
|  | 			deepestMsg = curr.Message | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	if deepestMsg != "" { | ||||||
|  | 		return deepestMsg | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	// ==== [3] ==== fallback to self.message | ||||||
|  |  | ||||||
| 	return ee.Message | 	return ee.Message | ||||||
| } | } | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										4
									
								
								go.mod
									
									
									
									
									
								
							
							
						
						
									
										4
									
								
								go.mod
									
									
									
									
									
								
							| @@ -1,6 +1,6 @@ | |||||||
| module gogs.mikescher.com/BlackForestBytes/goext | module gogs.mikescher.com/BlackForestBytes/goext | ||||||
|  |  | ||||||
| go 1.19 | go 1.21 | ||||||
|  |  | ||||||
| require ( | require ( | ||||||
| 	github.com/gin-gonic/gin v1.9.1 | 	github.com/gin-gonic/gin v1.9.1 | ||||||
| @@ -25,7 +25,7 @@ require ( | |||||||
| 	github.com/goccy/go-json v0.10.2 // indirect | 	github.com/goccy/go-json v0.10.2 // indirect | ||||||
| 	github.com/golang/snappy v0.0.4 // indirect | 	github.com/golang/snappy v0.0.4 // indirect | ||||||
| 	github.com/json-iterator/go v1.1.12 // indirect | 	github.com/json-iterator/go v1.1.12 // indirect | ||||||
| 	github.com/klauspost/compress v1.17.2 // indirect | 	github.com/klauspost/compress v1.17.3 // indirect | ||||||
| 	github.com/klauspost/cpuid/v2 v2.2.6 // indirect | 	github.com/klauspost/cpuid/v2 v2.2.6 // indirect | ||||||
| 	github.com/leodido/go-urn v1.2.4 // indirect | 	github.com/leodido/go-urn v1.2.4 // indirect | ||||||
| 	github.com/mattn/go-colorable v0.1.13 // indirect | 	github.com/mattn/go-colorable v0.1.13 // indirect | ||||||
|   | |||||||
							
								
								
									
										2
									
								
								go.sum
									
									
									
									
									
								
							
							
						
						
									
										2
									
								
								go.sum
									
									
									
									
									
								
							| @@ -55,6 +55,8 @@ github.com/klauspost/compress v1.17.1 h1:NE3C767s2ak2bweCZo3+rdP4U/HoyVXLv/X9f2g | |||||||
| github.com/klauspost/compress v1.17.1/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= | github.com/klauspost/compress v1.17.1/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= | ||||||
| github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4= | github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4= | ||||||
| github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= | github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= | ||||||
|  | github.com/klauspost/compress v1.17.3 h1:qkRjuerhUU1EmXLYGkSH6EZL+vPSxIrYjLNAK4slzwA= | ||||||
|  | github.com/klauspost/compress v1.17.3/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= | ||||||
| github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= | github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= | ||||||
| github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| package goext | package goext | ||||||
|  |  | ||||||
| const GoextVersion = "0.0.304" | const GoextVersion = "0.0.325" | ||||||
|  |  | ||||||
| const GoextVersionTimestamp = "2023-11-09T09:26:46+0100" | const GoextVersionTimestamp = "2023-11-27T14:14:58+0100" | ||||||
|   | |||||||
| @@ -35,7 +35,7 @@ func IsNil(i interface{}) bool { | |||||||
| 		return true | 		return true | ||||||
| 	} | 	} | ||||||
| 	switch reflect.TypeOf(i).Kind() { | 	switch reflect.TypeOf(i).Kind() { | ||||||
| 	case reflect.Ptr, reflect.Map, reflect.Array, reflect.Chan, reflect.Slice: | 	case reflect.Ptr, reflect.Map, reflect.Chan, reflect.Slice, reflect.Func, reflect.UnsafePointer: | ||||||
| 		return reflect.ValueOf(i).IsNil() | 		return reflect.ValueOf(i).IsNil() | ||||||
| 	} | 	} | ||||||
| 	return false | 	return false | ||||||
|   | |||||||
| @@ -5,6 +5,7 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/bsontype" | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
| 	"go.mongodb.org/mongo-driver/bson/primitive" | 	"go.mongodb.org/mongo-driver/bson/primitive" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | ||||||
| 	"reflect" | 	"reflect" | ||||||
| ) | ) | ||||||
| @@ -18,9 +19,21 @@ func CreateGoExtBsonRegistry() *bsoncodec.Registry { | |||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{}) | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{}) | ||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{}) | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{}) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.UnixTime{}), rfctime.UnixTime{}) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.UnixTime{}), rfctime.UnixTime{}) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.UnixMilliTime{}), rfctime.UnixMilliTime{}) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.UnixMilliTime{}), rfctime.UnixMilliTime{}) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.UnixNanoTime{}), rfctime.UnixNanoTime{}) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.UnixNanoTime{}), rfctime.UnixNanoTime{}) | ||||||
|  |  | ||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.Date{}), rfctime.Date{}) | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.Date{}), rfctime.Date{}) | ||||||
| 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.Date{}), rfctime.Date{}) | 	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.Date{}), rfctime.Date{}) | ||||||
|  |  | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.SecondsF64(0)), rfctime.SecondsF64(0)) | ||||||
|  | 	rb.RegisterTypeDecoder(reflect.TypeOf(langext.Ptr(rfctime.SecondsF64(0))), rfctime.SecondsF64(0)) | ||||||
|  |  | ||||||
| 	bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb) | 	bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb) | ||||||
| 	bsoncodec.DefaultValueDecoders{}.RegisterDefaultDecoders(rb) | 	bsoncodec.DefaultValueDecoders{}.RegisterDefaultDecoders(rb) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,29 +1,28 @@ | |||||||
| package pagination | package pagination | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" |  | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type Filter interface { | type Filter interface { | ||||||
| 	FilterQuery() mongo.Pipeline | 	FilterQuery() mongo.Pipeline | ||||||
| 	Pagination() (string, ct.SortDirection) | 	Sort() bson.D | ||||||
| } | } | ||||||
|  |  | ||||||
| type dynamicFilter struct { | type dynamicFilter struct { | ||||||
| 	pipeline  mongo.Pipeline | 	pipeline mongo.Pipeline | ||||||
| 	sortField string | 	sort     bson.D | ||||||
| 	sortDir   ct.SortDirection |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func (d dynamicFilter) FilterQuery() mongo.Pipeline { | func (d dynamicFilter) FilterQuery() mongo.Pipeline { | ||||||
| 	return d.pipeline | 	return d.pipeline | ||||||
| } | } | ||||||
|  |  | ||||||
| func (d dynamicFilter) Pagination() (string, ct.SortDirection) { | func (d dynamicFilter) Sort() bson.D { | ||||||
| 	return d.sortField, d.sortDir | 	return d.sort | ||||||
| } | } | ||||||
|  |  | ||||||
| func CreateFilter(pipeline mongo.Pipeline, sortField string, sortdir ct.SortDirection) Filter { | func CreateFilter(pipeline mongo.Pipeline, sort bson.D) Filter { | ||||||
| 	return dynamicFilter{pipeline: pipeline, sortField: sortField, sortDir: sortdir} | 	return dynamicFilter{pipeline: pipeline, sort: sort} | ||||||
| } | } | ||||||
|   | |||||||
| @@ -2,7 +2,14 @@ package rfctime | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/timeext" | 	"gogs.mikescher.com/BlackForestBytes/goext/timeext" | ||||||
|  | 	"reflect" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -54,6 +61,63 @@ func (d SecondsF64) MarshalJSON() ([]byte, error) { | |||||||
| 	return json.Marshal(secs) | 	return json.Marshal(secs) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (d *SecondsF64) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*d = SecondsF64(0) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeDouble { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into SecondsF64", bt)) | ||||||
|  | 	} | ||||||
|  | 	var secValue float64 | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&secValue) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*d = SecondsF64(int64(secValue * float64(time.Second))) | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (d SecondsF64) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	return bson.MarshalValue(d.Seconds()) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (d SecondsF64) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { | ||||||
|  | 	if val.Kind() == reflect.Ptr && val.IsNil() { | ||||||
|  | 		if !val.CanSet() { | ||||||
|  | 			return errors.New("ValueUnmarshalerDecodeValue") | ||||||
|  | 		} | ||||||
|  | 		val.Set(reflect.New(val.Type().Elem())) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr && len(src) == 0 { | ||||||
|  | 		val.Set(reflect.Zero(val.Type())) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = d.UnmarshalBSONValue(tp, src) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr { | ||||||
|  | 		val.Set(reflect.ValueOf(&d)) | ||||||
|  | 	} else { | ||||||
|  | 		val.Set(reflect.ValueOf(d)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func NewSecondsF64(t time.Duration) SecondsF64 { | func NewSecondsF64(t time.Duration) SecondsF64 { | ||||||
| 	return SecondsF64(t) | 	return SecondsF64(t) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -2,6 +2,13 @@ package rfctime | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"reflect" | ||||||
| 	"strconv" | 	"strconv" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -59,6 +66,63 @@ func (t *UnixTime) UnmarshalText(data []byte) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (t *UnixTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*t = UnixTime{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeDateTime { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into UnixTime", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt time.Time | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*t = UnixTime(tt) | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t UnixTime) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	return bson.MarshalValue(time.Time(t)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t UnixTime) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { | ||||||
|  | 	if val.Kind() == reflect.Ptr && val.IsNil() { | ||||||
|  | 		if !val.CanSet() { | ||||||
|  | 			return errors.New("ValueUnmarshalerDecodeValue") | ||||||
|  | 		} | ||||||
|  | 		val.Set(reflect.New(val.Type().Elem())) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr && len(src) == 0 { | ||||||
|  | 		val.Set(reflect.Zero(val.Type())) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = t.UnmarshalBSONValue(tp, src) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr { | ||||||
|  | 		val.Set(reflect.ValueOf(&t)) | ||||||
|  | 	} else { | ||||||
|  | 		val.Set(reflect.ValueOf(t)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func (t UnixTime) Serialize() string { | func (t UnixTime) Serialize() string { | ||||||
| 	return strconv.FormatInt(t.Time().Unix(), 10) | 	return strconv.FormatInt(t.Time().Unix(), 10) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -2,6 +2,13 @@ package rfctime | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"reflect" | ||||||
| 	"strconv" | 	"strconv" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -59,6 +66,63 @@ func (t *UnixMilliTime) UnmarshalText(data []byte) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (t *UnixMilliTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*t = UnixMilliTime{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeDateTime { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into UnixMilliTime", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt time.Time | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*t = UnixMilliTime(tt) | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t UnixMilliTime) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	return bson.MarshalValue(time.Time(t)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t UnixMilliTime) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { | ||||||
|  | 	if val.Kind() == reflect.Ptr && val.IsNil() { | ||||||
|  | 		if !val.CanSet() { | ||||||
|  | 			return errors.New("ValueUnmarshalerDecodeValue") | ||||||
|  | 		} | ||||||
|  | 		val.Set(reflect.New(val.Type().Elem())) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr && len(src) == 0 { | ||||||
|  | 		val.Set(reflect.Zero(val.Type())) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = t.UnmarshalBSONValue(tp, src) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr { | ||||||
|  | 		val.Set(reflect.ValueOf(&t)) | ||||||
|  | 	} else { | ||||||
|  | 		val.Set(reflect.ValueOf(t)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func (t UnixMilliTime) Serialize() string { | func (t UnixMilliTime) Serialize() string { | ||||||
| 	return strconv.FormatInt(t.Time().UnixMilli(), 10) | 	return strconv.FormatInt(t.Time().UnixMilli(), 10) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -2,6 +2,13 @@ package rfctime | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsoncodec" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsonrw" | ||||||
|  | 	"go.mongodb.org/mongo-driver/bson/bsontype" | ||||||
|  | 	"reflect" | ||||||
| 	"strconv" | 	"strconv" | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| @@ -59,6 +66,63 @@ func (t *UnixNanoTime) UnmarshalText(data []byte) error { | |||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (t *UnixNanoTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error { | ||||||
|  | 	if bt == bson.TypeNull { | ||||||
|  | 		// we can't set nil in UnmarshalBSONValue (so we use default(struct)) | ||||||
|  | 		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values | ||||||
|  | 		// https://stackoverflow.com/questions/75167597 | ||||||
|  | 		// https://jira.mongodb.org/browse/GODRIVER-2252 | ||||||
|  | 		*t = UnixNanoTime{} | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	if bt != bson.TypeDateTime { | ||||||
|  | 		return errors.New(fmt.Sprintf("cannot unmarshal %v into UnixNanoTime", bt)) | ||||||
|  | 	} | ||||||
|  | 	var tt time.Time | ||||||
|  | 	err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	*t = UnixNanoTime(tt) | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t UnixNanoTime) MarshalBSONValue() (bsontype.Type, []byte, error) { | ||||||
|  | 	return bson.MarshalValue(time.Time(t)) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (t UnixNanoTime) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error { | ||||||
|  | 	if val.Kind() == reflect.Ptr && val.IsNil() { | ||||||
|  | 		if !val.CanSet() { | ||||||
|  | 			return errors.New("ValueUnmarshalerDecodeValue") | ||||||
|  | 		} | ||||||
|  | 		val.Set(reflect.New(val.Type().Elem())) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr && len(src) == 0 { | ||||||
|  | 		val.Set(reflect.Zero(val.Type())) | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	err = t.UnmarshalBSONValue(tp, src) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if val.Kind() == reflect.Ptr { | ||||||
|  | 		val.Set(reflect.ValueOf(&t)) | ||||||
|  | 	} else { | ||||||
|  | 		val.Set(reflect.ValueOf(t)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func (t UnixNanoTime) Serialize() string { | func (t UnixNanoTime) Serialize() string { | ||||||
| 	return strconv.FormatInt(t.Time().UnixNano(), 10) | 	return strconv.FormatInt(t.Time().UnixNano(), 10) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,33 +1,28 @@ | |||||||
| package syncext | package syncext | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"context" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" |  | ||||||
| 	"sync" | 	"sync" | ||||||
| 	"time" |  | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type AtomicBool struct { | type Atomic[T any] struct { | ||||||
| 	v        bool | 	v    T | ||||||
| 	listener map[string]chan bool | 	lock sync.RWMutex | ||||||
| 	lock     sync.Mutex |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func NewAtomicBool(value bool) *AtomicBool { | func NewAtomic[T any](value T) *Atomic[T] { | ||||||
| 	return &AtomicBool{ | 	return &Atomic[T]{ | ||||||
| 		v:        value, | 		v:    value, | ||||||
| 		listener: make(map[string]chan bool), | 		lock: sync.RWMutex{}, | ||||||
| 		lock:     sync.Mutex{}, |  | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
| func (a *AtomicBool) Get() bool { | func (a *Atomic[T]) Get() T { | ||||||
| 	a.lock.Lock() | 	a.lock.RLock() | ||||||
| 	defer a.lock.Unlock() | 	defer a.lock.RUnlock() | ||||||
| 	return a.v | 	return a.v | ||||||
| } | } | ||||||
|  |  | ||||||
| func (a *AtomicBool) Set(value bool) bool { | func (a *Atomic[T]) Set(value T) T { | ||||||
| 	a.lock.Lock() | 	a.lock.Lock() | ||||||
| 	defer a.lock.Unlock() | 	defer a.lock.Unlock() | ||||||
|  |  | ||||||
| @@ -35,79 +30,5 @@ func (a *AtomicBool) Set(value bool) bool { | |||||||
|  |  | ||||||
| 	a.v = value | 	a.v = value | ||||||
|  |  | ||||||
| 	for k, v := range a.listener { |  | ||||||
| 		select { |  | ||||||
| 		case v <- value: |  | ||||||
| 			// message sent |  | ||||||
| 		default: |  | ||||||
| 			// no receiver on channel |  | ||||||
| 			delete(a.listener, k) |  | ||||||
| 		} |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	return oldValue | 	return oldValue | ||||||
| } | } | ||||||
|  |  | ||||||
| func (a *AtomicBool) Wait(waitFor bool) { |  | ||||||
| 	_ = a.WaitWithContext(context.Background(), waitFor) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (a *AtomicBool) WaitWithTimeout(timeout time.Duration, waitFor bool) error { |  | ||||||
| 	ctx, cancel := context.WithTimeout(context.Background(), timeout) |  | ||||||
| 	defer cancel() |  | ||||||
| 	return a.WaitWithContext(ctx, waitFor) |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func (a *AtomicBool) WaitWithContext(ctx context.Context, waitFor bool) error { |  | ||||||
| 	if err := ctx.Err(); err != nil { |  | ||||||
| 		return err |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	if a.Get() == waitFor { |  | ||||||
| 		return nil |  | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	uuid, _ := langext.NewHexUUID() |  | ||||||
|  |  | ||||||
| 	waitchan := make(chan bool) |  | ||||||
|  |  | ||||||
| 	a.lock.Lock() |  | ||||||
| 	a.listener[uuid] = waitchan |  | ||||||
| 	a.lock.Unlock() |  | ||||||
| 	defer func() { |  | ||||||
| 		a.lock.Lock() |  | ||||||
| 		delete(a.listener, uuid) |  | ||||||
| 		a.lock.Unlock() |  | ||||||
| 	}() |  | ||||||
|  |  | ||||||
| 	for { |  | ||||||
| 		if err := ctx.Err(); err != nil { |  | ||||||
| 			return err |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		timeOut := 1024 * time.Millisecond |  | ||||||
|  |  | ||||||
| 		if dl, ok := ctx.Deadline(); ok { |  | ||||||
| 			timeOutMax := dl.Sub(time.Now()) |  | ||||||
| 			if timeOutMax <= 0 { |  | ||||||
| 				timeOut = 0 |  | ||||||
| 			} else if 0 < timeOutMax && timeOutMax < timeOut { |  | ||||||
| 				timeOut = timeOutMax |  | ||||||
| 			} |  | ||||||
| 		} |  | ||||||
|  |  | ||||||
| 		if v, ok := ReadChannelWithTimeout(waitchan, timeOut); ok { |  | ||||||
| 			if v == waitFor { |  | ||||||
| 				return nil |  | ||||||
| 			} |  | ||||||
| 		} else { |  | ||||||
| 			if err := ctx.Err(); err != nil { |  | ||||||
| 				return err |  | ||||||
| 			} |  | ||||||
|  |  | ||||||
| 			if a.Get() == waitFor { |  | ||||||
| 				return nil |  | ||||||
| 			} |  | ||||||
| 		} |  | ||||||
| 	} |  | ||||||
| } |  | ||||||
|   | |||||||
							
								
								
									
										113
									
								
								syncext/bool.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										113
									
								
								syncext/bool.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,113 @@ | |||||||
|  | package syncext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	"sync" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | type AtomicBool struct { | ||||||
|  | 	v        bool | ||||||
|  | 	listener map[string]chan bool | ||||||
|  | 	lock     sync.Mutex | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func NewAtomicBool(value bool) *AtomicBool { | ||||||
|  | 	return &AtomicBool{ | ||||||
|  | 		v:        value, | ||||||
|  | 		listener: make(map[string]chan bool), | ||||||
|  | 		lock:     sync.Mutex{}, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (a *AtomicBool) Get() bool { | ||||||
|  | 	a.lock.Lock() | ||||||
|  | 	defer a.lock.Unlock() | ||||||
|  | 	return a.v | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (a *AtomicBool) Set(value bool) bool { | ||||||
|  | 	a.lock.Lock() | ||||||
|  | 	defer a.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	oldValue := a.v | ||||||
|  |  | ||||||
|  | 	a.v = value | ||||||
|  |  | ||||||
|  | 	for k, v := range a.listener { | ||||||
|  | 		select { | ||||||
|  | 		case v <- value: | ||||||
|  | 			// message sent | ||||||
|  | 		default: | ||||||
|  | 			// no receiver on channel | ||||||
|  | 			delete(a.listener, k) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return oldValue | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (a *AtomicBool) Wait(waitFor bool) { | ||||||
|  | 	_ = a.WaitWithContext(context.Background(), waitFor) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (a *AtomicBool) WaitWithTimeout(timeout time.Duration, waitFor bool) error { | ||||||
|  | 	ctx, cancel := context.WithTimeout(context.Background(), timeout) | ||||||
|  | 	defer cancel() | ||||||
|  | 	return a.WaitWithContext(ctx, waitFor) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (a *AtomicBool) WaitWithContext(ctx context.Context, waitFor bool) error { | ||||||
|  | 	if err := ctx.Err(); err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if a.Get() == waitFor { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	uuid, _ := langext.NewHexUUID() | ||||||
|  |  | ||||||
|  | 	waitchan := make(chan bool) | ||||||
|  |  | ||||||
|  | 	a.lock.Lock() | ||||||
|  | 	a.listener[uuid] = waitchan | ||||||
|  | 	a.lock.Unlock() | ||||||
|  | 	defer func() { | ||||||
|  | 		a.lock.Lock() | ||||||
|  | 		delete(a.listener, uuid) | ||||||
|  | 		a.lock.Unlock() | ||||||
|  | 	}() | ||||||
|  |  | ||||||
|  | 	for { | ||||||
|  | 		if err := ctx.Err(); err != nil { | ||||||
|  | 			return err | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		timeOut := 1024 * time.Millisecond | ||||||
|  |  | ||||||
|  | 		if dl, ok := ctx.Deadline(); ok { | ||||||
|  | 			timeOutMax := dl.Sub(time.Now()) | ||||||
|  | 			if timeOutMax <= 0 { | ||||||
|  | 				timeOut = 0 | ||||||
|  | 			} else if 0 < timeOutMax && timeOutMax < timeOut { | ||||||
|  | 				timeOut = timeOutMax | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if v, ok := ReadChannelWithTimeout(waitchan, timeOut); ok { | ||||||
|  | 			if v == waitFor { | ||||||
|  | 				return nil | ||||||
|  | 			} | ||||||
|  | 		} else { | ||||||
|  | 			if err := ctx.Err(); err != nil { | ||||||
|  | 				return err | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			if a.Get() == waitFor { | ||||||
|  | 				return nil | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | } | ||||||
| @@ -14,12 +14,12 @@ func (c *Coll[TData]) decodeSingle(ctx context.Context, dec Decodable) (TData, e | |||||||
| 	if c.customDecoder != nil { | 	if c.customDecoder != nil { | ||||||
| 		res, err = (*c.customDecoder)(ctx, dec) | 		res, err = (*c.customDecoder)(ctx, dec) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return *new(TData), exerr.Wrap(err, "failed to decode single entity with custom-decoder").Type("decoder", *c.customDecoder).Build() | 			return *new(TData), exerr.Wrap(err, "failed to decode single entity with custom-decoder").Type("decoder", *c.customDecoder).NoLog().Build() | ||||||
| 		} | 		} | ||||||
| 	} else { | 	} else { | ||||||
| 		err = dec.Decode(&res) | 		err = dec.Decode(&res) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return *new(TData), exerr.Wrap(err, "failed to decode single entity").Type("target-type", res).Build() | 			return *new(TData), exerr.Wrap(err, "failed to decode single entity").Type("target-type", res).NoLog().Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -75,12 +75,12 @@ func (c *Coll[TData]) decodeSingleOrRequery(ctx context.Context, dec Decodable) | |||||||
| 		var res genDoc | 		var res genDoc | ||||||
| 		err := dec.Decode(&res) | 		err := dec.Decode(&res) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return *new(TData), exerr.Wrap(err, "failed to ID-decode entity").Build() | 			return *new(TData), exerr.Wrap(err, "failed to ID-decode entity").NoLog().Build() | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		v, err := c.findOneInternal(ctx, bson.M{"_id": res.ID}, false) | 		v, err := c.findOneInternal(ctx, bson.M{"_id": res.ID}, false) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return *new(TData), exerr.Wrap(err, "failed to re-query entity").Any("_id", res.ID).Build() | 			return *new(TData), exerr.Wrap(err, "failed to re-query entity").Any("_id", res.ID).NoLog().Build() | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		return *v, nil | 		return *v, nil | ||||||
|   | |||||||
							
								
								
									
										101
									
								
								wmo/queryFind.go
									
									
									
									
									
								
							
							
						
						
									
										101
									
								
								wmo/queryFind.go
									
									
									
									
									
								
							| @@ -4,18 +4,52 @@ import ( | |||||||
| 	"context" | 	"context" | ||||||
| 	"go.mongodb.org/mongo-driver/bson" | 	"go.mongodb.org/mongo-driver/bson" | ||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
|  | 	"go.mongodb.org/mongo-driver/mongo/options" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Find(ctx context.Context, filter bson.M) ([]TData, error) { | func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) { | ||||||
|  |  | ||||||
| 	pipeline := mongo.Pipeline{} | 	pipeline := mongo.Pipeline{} | ||||||
| 	pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}}) | 	pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}}) | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		if opt != nil && opt.Sort != nil { | ||||||
|  | 			pipeline = append(pipeline, bson.D{{Key: "$sort", Value: opt.Sort}}) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		if opt != nil && opt.Skip != nil { | ||||||
|  | 			pipeline = append(pipeline, bson.D{{Key: "$skip", Value: *opt.Skip}}) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for _, opt := range opts { | ||||||
|  | 		if opt != nil && opt.Limit != nil { | ||||||
|  | 			pipeline = append(pipeline, bson.D{{Key: "$limit", Value: *opt.Limit}}) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline) | 	for _, opt := range opts { | ||||||
|  | 		if opt != nil && opt.Projection != nil { | ||||||
|  | 			pipeline = append(pipeline, bson.D{{Key: "$project", Value: opt.Projection}}) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	convOpts := make([]*options.AggregateOptions, 0, len(opts)) | ||||||
|  | 	for _, v := range opts { | ||||||
|  | 		vConv, err := convertFindOpt(v) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||||
|  | 		} | ||||||
|  | 		convOpts = append(convOpts, vConv) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	cursor, err := c.coll.Aggregate(ctx, pipeline, convOpts...) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | 		return nil, exerr.Wrap(err, "mongo-aggregation failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | ||||||
| 	} | 	} | ||||||
| @@ -27,3 +61,66 @@ func (c *Coll[TData]) Find(ctx context.Context, filter bson.M) ([]TData, error) | |||||||
|  |  | ||||||
| 	return res, nil | 	return res, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // converts FindOptions to AggregateOptions | ||||||
|  | func convertFindOpt(v *options.FindOptions) (*options.AggregateOptions, error) { | ||||||
|  | 	if v == nil { | ||||||
|  | 		return nil, nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	r := options.Aggregate() | ||||||
|  |  | ||||||
|  | 	if v.AllowDiskUse != nil { | ||||||
|  | 		r.SetAllowDiskUse(*v.AllowDiskUse) | ||||||
|  | 	} | ||||||
|  | 	if v.AllowPartialResults != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'AllowPartialResults' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.BatchSize != nil { | ||||||
|  | 		r.SetBatchSize(*v.BatchSize) | ||||||
|  | 	} | ||||||
|  | 	if v.Collation != nil { | ||||||
|  | 		r.SetCollation(v.Collation) | ||||||
|  | 	} | ||||||
|  | 	if v.Comment != nil { | ||||||
|  | 		r.SetComment(*v.Comment) | ||||||
|  | 	} | ||||||
|  | 	if v.CursorType != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'CursorType' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.Hint != nil { | ||||||
|  | 		r.SetHint(v.Hint) | ||||||
|  | 	} | ||||||
|  | 	if v.Max != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'Max' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.MaxAwaitTime != nil { | ||||||
|  | 		r.SetMaxAwaitTime(*v.MaxAwaitTime) | ||||||
|  | 	} | ||||||
|  | 	if v.MaxTime != nil { | ||||||
|  | 		r.SetMaxTime(*v.MaxTime) | ||||||
|  | 	} | ||||||
|  | 	if v.Min != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'Min' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.NoCursorTimeout != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'NoCursorTimeout' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.OplogReplay != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'OplogReplay' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.ReturnKey != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'ReturnKey' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.ShowRecordID != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'ShowRecordID' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.Snapshot != nil { | ||||||
|  | 		return nil, exerr.New(exerr.TypeMongoInvalidOpt, "Invalid option 'Snapshot' (cannot convert to AggregateOptions)").Build() | ||||||
|  | 	} | ||||||
|  | 	if v.Let != nil { | ||||||
|  | 		r.SetLet(v.Let) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return r, nil | ||||||
|  | } | ||||||
|   | |||||||
| @@ -58,7 +58,7 @@ func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowN | |||||||
| 			return nil, nil | 			return nil, nil | ||||||
| 		} | 		} | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, exerr.Wrap(err, "mongo-query[find-one] failed").Any("filter", filter).Str("collection", c.Name()).Build() | 			return nil, exerr.Wrap(err, "mongo-query[find-one] failed").Any("filter", filter).Str("collection", c.Name()).NoLog().Build() | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		return &res, nil | 		return &res, nil | ||||||
| @@ -75,19 +75,19 @@ func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowN | |||||||
|  |  | ||||||
| 		cursor, err := c.coll.Aggregate(ctx, pipeline) | 		cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | 			return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).NoLog().Build() | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		if cursor.Next(ctx) { | 		if cursor.Next(ctx) { | ||||||
| 			v, err := c.decodeSingle(ctx, cursor) | 			v, err := c.decodeSingle(ctx, cursor) | ||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 				return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | 				return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed to decode results").Any("pipeline", pipeline).Str("collection", c.Name()).NoLog().Build() | ||||||
| 			} | 			} | ||||||
| 			return &v, nil | 			return &v, nil | ||||||
| 		} else if allowNull { | 		} else if allowNull { | ||||||
| 			return nil, nil | 			return nil, nil | ||||||
| 		} else { | 		} else { | ||||||
| 			return nil, exerr.Wrap(err, "mongo-aggregation [find-one] failed").Any("pipeline", pipeline).Str("collection", c.Name()).Build() | 			return nil, exerr.Wrap(mongo.ErrNoDocuments, "mongo-aggregation [find-one] returned no documents").Any("pipeline", pipeline).Str("collection", c.Name()).NoLog().Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|   | |||||||
| @@ -21,6 +21,20 @@ func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, erro | |||||||
| 	return *r, nil | 	return *r, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // InsertOneUnchecked behaves the same as InsertOne, but allows arbitrary data to be inserted (valueIn is any instead of TData) | ||||||
|  | func (c *Coll[TData]) InsertOneUnchecked(ctx context.Context, valueIn any) (TData, error) { | ||||||
|  | 	insRes, err := c.coll.InsertOne(ctx, valueIn) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	r, err := c.findOneInternal(ctx, bson.M{"_id": insRes.InsertedID}, false) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return *new(TData), exerr.Wrap(err, "mongo-query[insert-one] failed").Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  | 	return *r, nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) { | func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.InsertManyResult, error) { | ||||||
| 	insRes, err := c.coll.InsertMany(ctx, langext.ArrayToInterface(valueIn)) | 	insRes, err := c.coll.InsertMany(ctx, langext.ArrayToInterface(valueIn)) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -29,3 +43,13 @@ func (c *Coll[TData]) InsertMany(ctx context.Context, valueIn []TData) (*mongo.I | |||||||
|  |  | ||||||
| 	return insRes, nil | 	return insRes, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // InsertManyUnchecked behaves the same as InsertOne, but allows arbitrary data to be inserted (valueIn is []any instead of []TData) | ||||||
|  | func (c *Coll[TData]) InsertManyUnchecked(ctx context.Context, valueIn []any) (*mongo.InsertManyResult, error) { | ||||||
|  | 	insRes, err := c.coll.InsertMany(ctx, langext.ArrayToInterface(valueIn)) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, exerr.Wrap(err, "mongo-query[insert-many] failed").Int("len(valueIn)", len(valueIn)).Str("collection", c.Name()).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return insRes, nil | ||||||
|  | } | ||||||
|   | |||||||
| @@ -4,7 +4,6 @@ import ( | |||||||
| 	"context" | 	"context" | ||||||
| 	"go.mongodb.org/mongo-driver/bson" | 	"go.mongodb.org/mongo-driver/bson" | ||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" |  | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | ||||||
| @@ -21,18 +20,15 @@ func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.Filter, page int, | |||||||
|  |  | ||||||
| 	pipelineSort := mongo.Pipeline{} | 	pipelineSort := mongo.Pipeline{} | ||||||
| 	pipelineFilter := mongo.Pipeline{} | 	pipelineFilter := mongo.Pipeline{} | ||||||
| 	pf1 := "_id" | 	sort := bson.D{} | ||||||
| 	pd1 := ct.SortASC |  | ||||||
|  |  | ||||||
| 	if filter != nil { | 	if filter != nil { | ||||||
| 		pipelineFilter = filter.FilterQuery() | 		pipelineFilter = filter.FilterQuery() | ||||||
| 		pf1, pd1 = filter.Pagination() | 		sort = filter.Sort() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if pd1 == ct.SortASC { | 	if len(sort) != 0 { | ||||||
| 		pipelineSort = append(pipelineSort, bson.D{{Key: "$sort", Value: bson.D{{Key: pf1, Value: +1}}}}) | 		pipelineSort = append(pipelineSort, bson.D{{Key: "$sort", Value: sort}}) | ||||||
| 	} else if pd1 == ct.SortDESC { |  | ||||||
| 		pipelineSort = append(pipelineSort, bson.D{{Key: "$sort", Value: bson.D{{Key: pf1, Value: -1}}}}) |  | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	pipelinePaginate := mongo.Pipeline{} | 	pipelinePaginate := mongo.Pipeline{} | ||||||
| @@ -44,10 +40,10 @@ func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.Filter, page int, | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	pipelineCount := mongo.Pipeline{} | 	pipelineCount := mongo.Pipeline{} | ||||||
| 	pipelinePaginate = append(pipelinePaginate, bson.D{{Key: "$count", Value: "count"}}) | 	pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}}) | ||||||
|  |  | ||||||
| 	pipelineList := langext.ArrConcat(mongo.Pipeline{}, pipelineFilter, pipelinePaginate, c.extraModPipeline) | 	pipelineList := langext.ArrConcat(pipelineFilter, pipelineSort, pipelinePaginate, c.extraModPipeline, pipelineSort) | ||||||
| 	pipelineTotalCount := langext.ArrConcat(mongo.Pipeline{}, pipelineFilter, pipelineCount) | 	pipelineTotalCount := langext.ArrConcat(pipelineFilter, pipelineCount) | ||||||
|  |  | ||||||
| 	cursorList, err := c.coll.Aggregate(ctx, pipelineList) | 	cursorList, err := c.coll.Aggregate(ctx, pipelineList) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user