Compare commits
	
		
			19 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| b9d0348735 | |||
| b9e9575b9b | |||
| 295a098eb4 | |||
| b69a082bb1 | |||
| a4a8c83d17 | |||
| e952176bb0 | |||
| d99adb203b | |||
| f1f91f4cfa | |||
| 2afb265ea4 | |||
| be24f7a190 | |||
| aae8a706e9 | |||
| 7d64f18f54 | |||
| d08b2e565a | |||
| d29e84894d | |||
| 617298c366 | |||
| 668f308565 | |||
| 240a8ed7aa | |||
| 70de8e8d04 | |||
| d38fa60fbc | 
							
								
								
									
										263
									
								
								cryptext/pronouncablePassword.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										263
									
								
								cryptext/pronouncablePassword.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,263 @@ | |||||||
|  | package cryptext | ||||||
|  |  | ||||||
import (
	"crypto/rand"
	"io"
	"math"
	"math/big"
	mathrand "math/rand"
	"strings"
)
|  |  | ||||||
|  | const ( | ||||||
|  | 	ppStartChar            = "BCDFGHJKLMNPQRSTVWXZ" | ||||||
|  | 	ppEndChar              = "ABDEFIKMNORSTUXYZ" | ||||||
|  | 	ppVowel                = "AEIOUY" | ||||||
|  | 	ppConsonant            = "BCDFGHJKLMNPQRSTVWXZ" | ||||||
|  | 	ppSegmentLenMin        = 3 | ||||||
|  | 	ppSegmentLenMax        = 7 | ||||||
|  | 	ppMaxRepeatedVowel     = 2 | ||||||
|  | 	ppMaxRepeatedConsonant = 2 | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | var ppContinuation = map[uint8]string{ | ||||||
|  | 	'A': "BCDFGHJKLMNPRSTVWXYZ", | ||||||
|  | 	'B': "ADFIKLMNORSTUY", | ||||||
|  | 	'C': "AEIKOUY", | ||||||
|  | 	'D': "AEILORSUYZ", | ||||||
|  | 	'E': "BCDFGHJKLMNPRSTVWXYZ", | ||||||
|  | 	'F': "ADEGIKLOPRTUY", | ||||||
|  | 	'G': "ABDEFHILMNORSTUY", | ||||||
|  | 	'H': "AEIOUY", | ||||||
|  | 	'I': "BCDFGHJKLMNPRSTVWXZ", | ||||||
|  | 	'J': "AEIOUY", | ||||||
|  | 	'K': "ADEFHILMNORSTUY", | ||||||
|  | 	'L': "ADEFGIJKMNOPSTUVWYZ", | ||||||
|  | 	'M': "ABEFIKOPSTUY", | ||||||
|  | 	'N': "ABEFIKOPSTUY", | ||||||
|  | 	'O': "BCDFGHJKLMNPRSTVWXYZ", | ||||||
|  | 	'P': "AEFIJLORSTUY", | ||||||
|  | 	'Q': "AEIOUY", | ||||||
|  | 	'R': "ADEFGHIJKLMNOPSTUVYZ", | ||||||
|  | 	'S': "ACDEIKLOPTUYZ", | ||||||
|  | 	'T': "AEHIJOPRSUWY", | ||||||
|  | 	'U': "BCDFGHJKLMNPRSTVWXZ", | ||||||
|  | 	'V': "AEIOUY", | ||||||
|  | 	'W': "AEIOUY", | ||||||
|  | 	'X': "AEIOUY", | ||||||
|  | 	'Y': "ABCDFGHKLMNPRSTVXZ", | ||||||
|  | 	'Z': "AEILOTUY", | ||||||
|  | } | ||||||
|  |  | ||||||
|  | var ppLog2Map = map[int]float64{ | ||||||
|  | 	1:  0.00000000, | ||||||
|  | 	2:  1.00000000, | ||||||
|  | 	3:  1.58496250, | ||||||
|  | 	4:  2.00000000, | ||||||
|  | 	5:  2.32192809, | ||||||
|  | 	6:  2.58496250, | ||||||
|  | 	7:  2.80735492, | ||||||
|  | 	8:  3.00000000, | ||||||
|  | 	9:  3.16992500, | ||||||
|  | 	10: 3.32192809, | ||||||
|  | 	11: 3.45943162, | ||||||
|  | 	12: 3.58496250, | ||||||
|  | 	13: 3.70043972, | ||||||
|  | 	14: 3.80735492, | ||||||
|  | 	15: 3.90689060, | ||||||
|  | 	16: 4.00000000, | ||||||
|  | 	17: 4.08746284, | ||||||
|  | 	18: 4.16992500, | ||||||
|  | 	19: 4.24792751, | ||||||
|  | 	20: 4.32192809, | ||||||
|  | 	21: 4.39231742, | ||||||
|  | 	22: 4.45943162, | ||||||
|  | 	23: 4.52356196, | ||||||
|  | 	24: 4.58496250, | ||||||
|  | 	25: 4.64385619, | ||||||
|  | 	26: 4.70043972, | ||||||
|  | 	27: 4.75488750, | ||||||
|  | 	28: 4.80735492, | ||||||
|  | 	29: 4.85798100, | ||||||
|  | 	30: 4.90689060, | ||||||
|  | 	31: 4.95419631, | ||||||
|  | 	32: 5.00000000, | ||||||
|  | } | ||||||
|  |  | ||||||
|  | var ( | ||||||
|  | 	ppVowelMap     = ppMakeSet(ppVowel) | ||||||
|  | 	ppConsonantMap = ppMakeSet(ppConsonant) | ||||||
|  | 	ppEndCharMap   = ppMakeSet(ppEndChar) | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func ppMakeSet(v string) map[uint8]bool { | ||||||
|  | 	mp := make(map[uint8]bool, len(v)) | ||||||
|  | 	for _, chr := range v { | ||||||
|  | 		mp[uint8(chr)] = true | ||||||
|  | 	} | ||||||
|  | 	return mp | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ppRandInt(rng io.Reader, max int) int { | ||||||
|  | 	v, err := rand.Int(rng, big.NewInt(int64(max))) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
|  | 	} | ||||||
|  | 	return int(v.Int64()) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ppRand(rng io.Reader, chars string, entropy *float64) uint8 { | ||||||
|  | 	chr := chars[ppRandInt(rng, len(chars))] | ||||||
|  |  | ||||||
|  | 	*entropy = *entropy + ppLog2Map[len(chars)] | ||||||
|  |  | ||||||
|  | 	return chr | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ppCharType(chr uint8) (bool, bool) { | ||||||
|  | 	_, ok1 := ppVowelMap[chr] | ||||||
|  | 	_, ok2 := ppConsonantMap[chr] | ||||||
|  |  | ||||||
|  | 	return ok1, ok2 | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ppCharsetRemove(cs string, set map[uint8]bool, allowEmpty bool) string { | ||||||
|  | 	result := "" | ||||||
|  | 	for _, chr := range cs { | ||||||
|  | 		if _, ok := set[uint8(chr)]; !ok { | ||||||
|  | 			result += string(chr) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	if result == "" && !allowEmpty { | ||||||
|  | 		return cs | ||||||
|  | 	} | ||||||
|  | 	return result | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func ppCharsetFilter(cs string, set map[uint8]bool, allowEmpty bool) string { | ||||||
|  | 	result := "" | ||||||
|  | 	for _, chr := range cs { | ||||||
|  | 		if _, ok := set[uint8(chr)]; ok { | ||||||
|  | 			result += string(chr) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	if result == "" && !allowEmpty { | ||||||
|  | 		return cs | ||||||
|  | 	} | ||||||
|  | 	return result | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func PronouncablePasswordExt(rng io.Reader, pwlen int) (string, float64) { | ||||||
|  |  | ||||||
|  | 	// kinda pseudo markov-chain - with a few extra rules and no weights... | ||||||
|  |  | ||||||
|  | 	if pwlen <= 0 { | ||||||
|  | 		return "", 0 | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	vowelCount := 0 | ||||||
|  | 	consoCount := 0 | ||||||
|  | 	entropy := float64(0) | ||||||
|  |  | ||||||
|  | 	startChar := ppRand(rng, ppStartChar, &entropy) | ||||||
|  |  | ||||||
|  | 	result := string(startChar) | ||||||
|  | 	currentChar := startChar | ||||||
|  |  | ||||||
|  | 	isVowel, isConsonant := ppCharType(currentChar) | ||||||
|  | 	if isVowel { | ||||||
|  | 		vowelCount = 1 | ||||||
|  | 	} | ||||||
|  | 	if isConsonant { | ||||||
|  | 		consoCount = ppMaxRepeatedConsonant | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	segmentLen := 1 | ||||||
|  |  | ||||||
|  | 	segmentLenTarget := ppSegmentLenMin + ppRandInt(rng, ppSegmentLenMax-ppSegmentLenMin) | ||||||
|  |  | ||||||
|  | 	for len(result) < pwlen { | ||||||
|  |  | ||||||
|  | 		charset := ppContinuation[currentChar] | ||||||
|  | 		if vowelCount >= ppMaxRepeatedVowel { | ||||||
|  | 			charset = ppCharsetRemove(charset, ppVowelMap, false) | ||||||
|  | 		} | ||||||
|  | 		if consoCount >= ppMaxRepeatedConsonant { | ||||||
|  | 			charset = ppCharsetRemove(charset, ppConsonantMap, false) | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		lastOfSegment := false | ||||||
|  | 		newSegment := false | ||||||
|  |  | ||||||
|  | 		if len(result)+1 == pwlen { | ||||||
|  | 			// last of result | ||||||
|  | 			charset = ppCharsetFilter(charset, ppEndCharMap, false) | ||||||
|  | 		} else if segmentLen+1 == segmentLenTarget { | ||||||
|  | 			// last of segment | ||||||
|  | 			charsetNew := ppCharsetFilter(charset, ppEndCharMap, true) | ||||||
|  | 			if charsetNew != "" { | ||||||
|  | 				charset = charsetNew | ||||||
|  | 				lastOfSegment = true | ||||||
|  | 			} | ||||||
|  | 		} else if segmentLen >= segmentLenTarget { | ||||||
|  | 			// (perhaps) start of new segment | ||||||
|  | 			if _, ok := ppEndCharMap[currentChar]; ok { | ||||||
|  | 				charset = ppStartChar | ||||||
|  | 				newSegment = true | ||||||
|  | 			} else { | ||||||
|  | 				// continue segment for one more char to (hopefully) find an end-char | ||||||
|  | 				charsetNew := ppCharsetFilter(charset, ppEndCharMap, true) | ||||||
|  | 				if charsetNew != "" { | ||||||
|  | 					charset = charsetNew | ||||||
|  | 					lastOfSegment = true | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  | 		} else { | ||||||
|  | 			// normal continuation | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		newChar := ppRand(rng, charset, &entropy) | ||||||
|  | 		if lastOfSegment { | ||||||
|  | 			currentChar = newChar | ||||||
|  | 			segmentLen++ | ||||||
|  | 			result += strings.ToLower(string(newChar)) | ||||||
|  | 		} else if newSegment { | ||||||
|  | 			currentChar = newChar | ||||||
|  | 			segmentLen = 1 | ||||||
|  | 			result += strings.ToUpper(string(newChar)) | ||||||
|  | 			segmentLenTarget = ppSegmentLenMin + ppRandInt(rng, ppSegmentLenMax-ppSegmentLenMin) | ||||||
|  | 			vowelCount = 0 | ||||||
|  | 			consoCount = 0 | ||||||
|  | 		} else { | ||||||
|  | 			currentChar = newChar | ||||||
|  | 			segmentLen++ | ||||||
|  | 			result += strings.ToLower(string(newChar)) | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		isVowel, isConsonant := ppCharType(currentChar) | ||||||
|  | 		if isVowel { | ||||||
|  | 			vowelCount++ | ||||||
|  | 			consoCount = 0 | ||||||
|  | 		} | ||||||
|  | 		if isConsonant { | ||||||
|  | 			vowelCount = 0 | ||||||
|  | 			if newSegment { | ||||||
|  | 				consoCount = ppMaxRepeatedConsonant | ||||||
|  | 			} else { | ||||||
|  | 				consoCount++ | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return result, entropy | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func PronouncablePassword(len int) string { | ||||||
|  | 	v, _ := PronouncablePasswordExt(rand.Reader, len) | ||||||
|  | 	return v | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func PronouncablePasswordSeeded(seed int64, len int) string { | ||||||
|  |  | ||||||
|  | 	v, _ := PronouncablePasswordExt(mathrand.New(mathrand.NewSource(seed)), len) | ||||||
|  | 	return v | ||||||
|  | } | ||||||
							
								
								
									
										35
									
								
								cryptext/pronouncablePassword_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										35
									
								
								cryptext/pronouncablePassword_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,35 @@ | |||||||
|  | package cryptext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"fmt" | ||||||
|  | 	"math/rand" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func TestPronouncablePasswordExt(t *testing.T) { | ||||||
|  | 	for i := 0; i < 20; i++ { | ||||||
|  | 		pw, entropy := PronouncablePasswordExt(rand.New(rand.NewSource(int64(i))), 16) | ||||||
|  | 		fmt.Printf("[%.2f] => %s\n", entropy, pw) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestPronouncablePasswordSeeded(t *testing.T) { | ||||||
|  | 	for i := 0; i < 20; i++ { | ||||||
|  | 		pw := PronouncablePasswordSeeded(int64(i), 8) | ||||||
|  | 		fmt.Printf("%s\n", pw) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestPronouncablePassword(t *testing.T) { | ||||||
|  | 	for i := 0; i < 20; i++ { | ||||||
|  | 		pw := PronouncablePassword(i + 1) | ||||||
|  | 		fmt.Printf("%s\n", pw) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestPronouncablePasswordWrongLen(t *testing.T) { | ||||||
|  | 	PronouncablePassword(0) | ||||||
|  | 	PronouncablePassword(-1) | ||||||
|  | 	PronouncablePassword(-2) | ||||||
|  | 	PronouncablePassword(-3) | ||||||
|  | } | ||||||
							
								
								
									
										113
									
								
								dataext/syncMap.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										113
									
								
								dataext/syncMap.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,113 @@ | |||||||
|  | package dataext | ||||||
|  |  | ||||||
|  | import "sync" | ||||||
|  |  | ||||||
|  | type SyncMap[TKey comparable, TData any] struct { | ||||||
|  | 	data map[TKey]TData | ||||||
|  | 	lock sync.Mutex | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (s *SyncMap[TKey, TData]) Set(key TKey, data TData) { | ||||||
|  | 	s.lock.Lock() | ||||||
|  | 	defer s.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	if s.data == nil { | ||||||
|  | 		s.data = make(map[TKey]TData) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	s.data[key] = data | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (s *SyncMap[TKey, TData]) SetIfNotContains(key TKey, data TData) bool { | ||||||
|  | 	s.lock.Lock() | ||||||
|  | 	defer s.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	if s.data == nil { | ||||||
|  | 		s.data = make(map[TKey]TData) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if _, existsInPreState := s.data[key]; existsInPreState { | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	s.data[key] = data | ||||||
|  |  | ||||||
|  | 	return true | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (s *SyncMap[TKey, TData]) Get(key TKey) (TData, bool) { | ||||||
|  | 	s.lock.Lock() | ||||||
|  | 	defer s.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	if s.data == nil { | ||||||
|  | 		s.data = make(map[TKey]TData) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if v, ok := s.data[key]; ok { | ||||||
|  | 		return v, true | ||||||
|  | 	} else { | ||||||
|  | 		return *new(TData), false | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (s *SyncMap[TKey, TData]) Delete(key TKey) bool { | ||||||
|  | 	s.lock.Lock() | ||||||
|  | 	defer s.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	if s.data == nil { | ||||||
|  | 		s.data = make(map[TKey]TData) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	_, ok := s.data[key] | ||||||
|  |  | ||||||
|  | 	delete(s.data, key) | ||||||
|  |  | ||||||
|  | 	return ok | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (s *SyncMap[TKey, TData]) Contains(key TKey) bool { | ||||||
|  | 	s.lock.Lock() | ||||||
|  | 	defer s.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	if s.data == nil { | ||||||
|  | 		s.data = make(map[TKey]TData) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	_, ok := s.data[key] | ||||||
|  |  | ||||||
|  | 	return ok | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (s *SyncMap[TKey, TData]) GetAllKeys() []TKey { | ||||||
|  | 	s.lock.Lock() | ||||||
|  | 	defer s.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	if s.data == nil { | ||||||
|  | 		s.data = make(map[TKey]TData) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	r := make([]TKey, 0, len(s.data)) | ||||||
|  |  | ||||||
|  | 	for k := range s.data { | ||||||
|  | 		r = append(r, k) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return r | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (s *SyncMap[TKey, TData]) GetAllValues() []TData { | ||||||
|  | 	s.lock.Lock() | ||||||
|  | 	defer s.lock.Unlock() | ||||||
|  |  | ||||||
|  | 	if s.data == nil { | ||||||
|  | 		s.data = make(map[TKey]TData) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	r := make([]TData, 0, len(s.data)) | ||||||
|  |  | ||||||
|  | 	for _, v := range s.data { | ||||||
|  | 		r = append(r, v) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return r | ||||||
|  | } | ||||||
| @@ -68,6 +68,7 @@ func init() { | |||||||
| } | } | ||||||
|  |  | ||||||
| type Builder struct { | type Builder struct { | ||||||
|  | 	wrappedErr      error | ||||||
| 	errorData       *ExErr | 	errorData       *ExErr | ||||||
| 	containsGinData bool | 	containsGinData bool | ||||||
| 	noLog           bool | 	noLog           bool | ||||||
| @@ -89,9 +90,9 @@ func Wrap(err error, msg string) *Builder { | |||||||
| 	if !pkgconfig.RecursiveErrors { | 	if !pkgconfig.RecursiveErrors { | ||||||
| 		v := FromError(err) | 		v := FromError(err) | ||||||
| 		v.Message = msg | 		v.Message = msg | ||||||
| 		return &Builder{errorData: v} | 		return &Builder{wrappedErr: err, errorData: v} | ||||||
| 	} | 	} | ||||||
| 	return &Builder{errorData: wrapExErr(FromError(err), msg, CatWrap, 1)} | 	return &Builder{wrappedErr: err, errorData: wrapExErr(FromError(err), msg, CatWrap, 1)} | ||||||
| } | } | ||||||
|  |  | ||||||
| // ---------------------------------------------------------------------------- | // ---------------------------------------------------------------------------- | ||||||
| @@ -414,13 +415,17 @@ func extractHeader(header map[string][]string) []string { | |||||||
| func (b *Builder) Build() error { | func (b *Builder) Build() error { | ||||||
| 	warnOnPkgConfigNotInitialized() | 	warnOnPkgConfigNotInitialized() | ||||||
|  |  | ||||||
|  | 	if pkgconfig.DisableErrorWrapping && b.wrappedErr != nil { | ||||||
|  | 		return b.wrappedErr | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	if pkgconfig.ZeroLogErrTraces && !b.noLog && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) { | 	if pkgconfig.ZeroLogErrTraces && !b.noLog && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) { | ||||||
| 		b.errorData.ShortLog(stackSkipLogger.Error()) | 		b.errorData.ShortLog(stackSkipLogger.Error()) | ||||||
| 	} else if pkgconfig.ZeroLogAllTraces && !b.noLog { | 	} else if pkgconfig.ZeroLogAllTraces && !b.noLog { | ||||||
| 		b.errorData.ShortLog(stackSkipLogger.Error()) | 		b.errorData.ShortLog(stackSkipLogger.Error()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	b.CallListener(MethodBuild) | 	b.errorData.CallListener(MethodBuild) | ||||||
|  |  | ||||||
| 	return b.errorData | 	return b.errorData | ||||||
| } | } | ||||||
| @@ -442,7 +447,7 @@ func (b *Builder) Output(ctx context.Context, g *gin.Context) { | |||||||
| 		b.errorData.Log(stackSkipLogger.Warn()) | 		b.errorData.Log(stackSkipLogger.Warn()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	b.CallListener(MethodOutput) | 	b.errorData.CallListener(MethodOutput) | ||||||
| } | } | ||||||
|  |  | ||||||
| // Print prints the error | // Print prints the error | ||||||
| @@ -454,7 +459,7 @@ func (b *Builder) Print() { | |||||||
| 		b.errorData.ShortLog(stackSkipLogger.Warn()) | 		b.errorData.ShortLog(stackSkipLogger.Warn()) | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	b.CallListener(MethodPrint) | 	b.errorData.CallListener(MethodPrint) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (b *Builder) Format(level LogPrintLevel) string { | func (b *Builder) Format(level LogPrintLevel) string { | ||||||
| @@ -467,7 +472,7 @@ func (b *Builder) Fatal() { | |||||||
| 	b.errorData.Severity = SevFatal | 	b.errorData.Severity = SevFatal | ||||||
| 	b.errorData.Log(stackSkipLogger.WithLevel(zerolog.FatalLevel)) | 	b.errorData.Log(stackSkipLogger.WithLevel(zerolog.FatalLevel)) | ||||||
|  |  | ||||||
| 	b.CallListener(MethodFatal) | 	b.errorData.CallListener(MethodFatal) | ||||||
|  |  | ||||||
| 	os.Exit(1) | 	os.Exit(1) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -50,6 +50,7 @@ var ( | |||||||
|  |  | ||||||
| 	TypeSQLQuery  = NewType("SQL_QUERY", langext.Ptr(500)) | 	TypeSQLQuery  = NewType("SQL_QUERY", langext.Ptr(500)) | ||||||
| 	TypeSQLBuild  = NewType("SQL_BUILD", langext.Ptr(500)) | 	TypeSQLBuild  = NewType("SQL_BUILD", langext.Ptr(500)) | ||||||
|  | 	TypeSQLDecode = NewType("SQL_DECODE", langext.Ptr(500)) | ||||||
|  |  | ||||||
| 	TypeWrap = NewType("Wrap", nil) | 	TypeWrap = NewType("Wrap", nil) | ||||||
|  |  | ||||||
| @@ -71,15 +72,18 @@ var ( | |||||||
| 	// other values come from the downstream application that uses goext | 	// other values come from the downstream application that uses goext | ||||||
| ) | ) | ||||||
|  |  | ||||||
| var registeredTypes = dataext.SyncSet[string]{} | var registeredTypes = dataext.SyncMap[string, ErrorType]{} | ||||||
|  |  | ||||||
| func NewType(key string, defStatusCode *int) ErrorType { | func NewType(key string, defStatusCode *int) ErrorType { | ||||||
| 	insertOkay := registeredTypes.Add(key) | 	et := ErrorType{key, defStatusCode} | ||||||
| 	if !insertOkay { |  | ||||||
| 		panic("Cannot register same ErrType ('" + key + "') more than once") | 	registeredTypes.Set(key, et) | ||||||
|  |  | ||||||
|  | 	return et | ||||||
| } | } | ||||||
|  |  | ||||||
| 	return ErrorType{key, defStatusCode} | func ListRegisteredTypes() []ErrorType { | ||||||
|  | 	return registeredTypes.GetAllValues() | ||||||
| } | } | ||||||
|  |  | ||||||
| type LogPrintLevel string | type LogPrintLevel string | ||||||
|   | |||||||
| @@ -13,6 +13,7 @@ type ErrorPackageConfig struct { | |||||||
| 	IncludeMetaInGinOutput bool                                             // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output() | 	IncludeMetaInGinOutput bool                                             // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output() | ||||||
| 	ExtendGinOutput        func(err *ExErr, json map[string]any)            // (Optionally) extend the gin output with more fields | 	ExtendGinOutput        func(err *ExErr, json map[string]any)            // (Optionally) extend the gin output with more fields | ||||||
| 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) // (Optionally) extend the gin `__data` output with more fields | 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) // (Optionally) extend the gin `__data` output with more fields | ||||||
|  | 	DisableErrorWrapping   bool                                             // Disables the exerr.Wrap()...Build() function - will always return the original error | ||||||
| } | } | ||||||
|  |  | ||||||
| type ErrorPackageConfigInit struct { | type ErrorPackageConfigInit struct { | ||||||
| @@ -23,6 +24,7 @@ type ErrorPackageConfigInit struct { | |||||||
| 	IncludeMetaInGinOutput *bool | 	IncludeMetaInGinOutput *bool | ||||||
| 	ExtendGinOutput        func(err *ExErr, json map[string]any) | 	ExtendGinOutput        func(err *ExErr, json map[string]any) | ||||||
| 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) | 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) | ||||||
|  | 	DisableErrorWrapping   *bool | ||||||
| } | } | ||||||
|  |  | ||||||
| var initialized = false | var initialized = false | ||||||
| @@ -35,6 +37,7 @@ var pkgconfig = ErrorPackageConfig{ | |||||||
| 	IncludeMetaInGinOutput: true, | 	IncludeMetaInGinOutput: true, | ||||||
| 	ExtendGinOutput:        func(err *ExErr, json map[string]any) {}, | 	ExtendGinOutput:        func(err *ExErr, json map[string]any) {}, | ||||||
| 	ExtendGinDataOutput:    func(err *ExErr, depth int, json map[string]any) {}, | 	ExtendGinDataOutput:    func(err *ExErr, depth int, json map[string]any) {}, | ||||||
|  | 	DisableErrorWrapping:   false, | ||||||
| } | } | ||||||
|  |  | ||||||
| // Init initializes the exerr packages | // Init initializes the exerr packages | ||||||
| @@ -63,6 +66,7 @@ func Init(cfg ErrorPackageConfigInit) { | |||||||
| 		IncludeMetaInGinOutput: langext.Coalesce(cfg.IncludeMetaInGinOutput, pkgconfig.IncludeMetaInGinOutput), | 		IncludeMetaInGinOutput: langext.Coalesce(cfg.IncludeMetaInGinOutput, pkgconfig.IncludeMetaInGinOutput), | ||||||
| 		ExtendGinOutput:        ego, | 		ExtendGinOutput:        ego, | ||||||
| 		ExtendGinDataOutput:    egdo, | 		ExtendGinDataOutput:    egdo, | ||||||
|  | 		DisableErrorWrapping:   langext.Coalesce(cfg.DisableErrorWrapping, pkgconfig.DisableErrorWrapping), | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	initialized = true | 	initialized = true | ||||||
|   | |||||||
| @@ -25,13 +25,11 @@ func RegisterListener(l Listener) { | |||||||
| 	listener = append(listener, l) | 	listener = append(listener, l) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (b *Builder) CallListener(m Method) { | func (ee *ExErr) CallListener(m Method) { | ||||||
| 	valErr := b.errorData |  | ||||||
|  |  | ||||||
| 	listenerLock.Lock() | 	listenerLock.Lock() | ||||||
| 	defer listenerLock.Unlock() | 	defer listenerLock.Unlock() | ||||||
|  |  | ||||||
| 	for _, v := range listener { | 	for _, v := range listener { | ||||||
| 		v(m, valErr) | 		v(m, ee) | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|   | |||||||
| @@ -9,6 +9,7 @@ import ( | |||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | 	"gogs.mikescher.com/BlackForestBytes/goext/rext" | ||||||
| 	"net" | 	"net" | ||||||
| 	"net/http" | 	"net/http" | ||||||
|  | 	"net/http/httptest" | ||||||
| 	"regexp" | 	"regexp" | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"time" | 	"time" | ||||||
| @@ -16,7 +17,7 @@ import ( | |||||||
|  |  | ||||||
| type GinWrapper struct { | type GinWrapper struct { | ||||||
| 	engine          *gin.Engine | 	engine          *gin.Engine | ||||||
| 	SuppressGinLogs bool | 	suppressGinLogs bool | ||||||
|  |  | ||||||
| 	allowCors             bool | 	allowCors             bool | ||||||
| 	ginDebug              bool | 	ginDebug              bool | ||||||
| @@ -50,7 +51,7 @@ func NewEngine(opt Options) *GinWrapper { | |||||||
|  |  | ||||||
| 	wrapper := &GinWrapper{ | 	wrapper := &GinWrapper{ | ||||||
| 		engine:                engine, | 		engine:                engine, | ||||||
| 		SuppressGinLogs:       false, | 		suppressGinLogs:       false, | ||||||
| 		allowCors:             langext.Coalesce(opt.AllowCors, false), | 		allowCors:             langext.Coalesce(opt.AllowCors, false), | ||||||
| 		ginDebug:              langext.Coalesce(opt.GinDebug, true), | 		ginDebug:              langext.Coalesce(opt.GinDebug, true), | ||||||
| 		bufferBody:            langext.Coalesce(opt.BufferBody, false), | 		bufferBody:            langext.Coalesce(opt.BufferBody, false), | ||||||
| @@ -74,7 +75,7 @@ func NewEngine(opt Options) *GinWrapper { | |||||||
|  |  | ||||||
| 		ginlogger := gin.Logger() | 		ginlogger := gin.Logger() | ||||||
| 		engine.Use(func(context *gin.Context) { | 		engine.Use(func(context *gin.Context) { | ||||||
| 			if !wrapper.SuppressGinLogs { | 			if !wrapper.suppressGinLogs { | ||||||
| 				ginlogger(context) | 				ginlogger(context) | ||||||
| 			} | 			} | ||||||
| 		}) | 		}) | ||||||
| @@ -185,3 +186,10 @@ func (w *GinWrapper) cleanMiddlewareName(fname string) string { | |||||||
|  |  | ||||||
| 	return fname | 	return fname | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // ServeHTTP only used for unit tests | ||||||
|  | func (w *GinWrapper) ServeHTTP(req *http.Request) *httptest.ResponseRecorder { | ||||||
|  | 	respRec := httptest.NewRecorder() | ||||||
|  | 	w.engine.ServeHTTP(respRec, req) | ||||||
|  | 	return respRec | ||||||
|  | } | ||||||
|   | |||||||
| @@ -163,16 +163,16 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | |||||||
|  |  | ||||||
| 	ictx, cancel := context.WithTimeout(context.Background(), langext.Coalesce(pctx.timeout, pctx.wrapper.requestTimeout)) | 	ictx, cancel := context.WithTimeout(context.Background(), langext.Coalesce(pctx.timeout, pctx.wrapper.requestTimeout)) | ||||||
|  |  | ||||||
|  | 	actx := CreateAppContext(pctx.ginCtx, ictx, cancel) | ||||||
|  |  | ||||||
| 	if pctx.persistantData.sessionObj != nil { | 	if pctx.persistantData.sessionObj != nil { | ||||||
| 		err := pctx.persistantData.sessionObj.Init(pctx.ginCtx, ictx) | 		err := pctx.persistantData.sessionObj.Init(pctx.ginCtx, actx) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			cancel() | 			actx.Cancel() | ||||||
| 			return nil, nil, langext.Ptr(Error(exerr.Wrap(err, "Failed to init session").Build())) | 			return nil, nil, langext.Ptr(Error(exerr.Wrap(err, "Failed to init session").Build())) | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	actx := CreateAppContext(pctx.ginCtx, ictx, cancel) |  | ||||||
|  |  | ||||||
| 	return actx, pctx.ginCtx, nil | 	return actx, pctx.ginCtx, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|   | |||||||
| @@ -9,6 +9,16 @@ import ( | |||||||
| 	"os" | 	"os" | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | type cookieval struct { | ||||||
|  | 	name     string | ||||||
|  | 	value    string | ||||||
|  | 	maxAge   int | ||||||
|  | 	path     string | ||||||
|  | 	domain   string | ||||||
|  | 	secure   bool | ||||||
|  | 	httpOnly bool | ||||||
|  | } | ||||||
|  |  | ||||||
| type headerval struct { | type headerval struct { | ||||||
| 	Key string | 	Key string | ||||||
| 	Val string | 	Val string | ||||||
| @@ -17,6 +27,7 @@ type headerval struct { | |||||||
| type HTTPResponse interface { | type HTTPResponse interface { | ||||||
| 	Write(g *gin.Context) | 	Write(g *gin.Context) | ||||||
| 	WithHeader(k string, v string) HTTPResponse | 	WithHeader(k string, v string) HTTPResponse | ||||||
|  | 	WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse | ||||||
| 	IsSuccess() bool | 	IsSuccess() bool | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -33,6 +44,7 @@ type jsonHTTPResponse struct { | |||||||
| 	statusCode int | 	statusCode int | ||||||
| 	data       any | 	data       any | ||||||
| 	headers    []headerval | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j jsonHTTPResponse) jsonRenderer(g *gin.Context) json.GoJsonRender { | func (j jsonHTTPResponse) jsonRenderer(g *gin.Context) json.GoJsonRender { | ||||||
| @@ -47,6 +59,9 @@ func (j jsonHTTPResponse) Write(g *gin.Context) { | |||||||
| 	for _, v := range j.headers { | 	for _, v := range j.headers { | ||||||
| 		g.Header(v.Key, v.Val) | 		g.Header(v.Key, v.Val) | ||||||
| 	} | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	g.Render(j.statusCode, j.jsonRenderer(g)) | 	g.Render(j.statusCode, j.jsonRenderer(g)) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -55,6 +70,11 @@ func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j jsonHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
| func (j jsonHTTPResponse) IsSuccess() bool { | func (j jsonHTTPResponse) IsSuccess() bool { | ||||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
| } | } | ||||||
| @@ -82,12 +102,16 @@ func (j jsonHTTPResponse) Headers() []string { | |||||||
| type emptyHTTPResponse struct { | type emptyHTTPResponse struct { | ||||||
| 	statusCode int | 	statusCode int | ||||||
| 	headers    []headerval | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j emptyHTTPResponse) Write(g *gin.Context) { | func (j emptyHTTPResponse) Write(g *gin.Context) { | ||||||
| 	for _, v := range j.headers { | 	for _, v := range j.headers { | ||||||
| 		g.Header(v.Key, v.Val) | 		g.Header(v.Key, v.Val) | ||||||
| 	} | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	g.Status(j.statusCode) | 	g.Status(j.statusCode) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -96,6 +120,11 @@ func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j emptyHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
| func (j emptyHTTPResponse) IsSuccess() bool { | func (j emptyHTTPResponse) IsSuccess() bool { | ||||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
| } | } | ||||||
| @@ -120,12 +149,16 @@ type textHTTPResponse struct { | |||||||
| 	statusCode int | 	statusCode int | ||||||
| 	data       string | 	data       string | ||||||
| 	headers    []headerval | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j textHTTPResponse) Write(g *gin.Context) { | func (j textHTTPResponse) Write(g *gin.Context) { | ||||||
| 	for _, v := range j.headers { | 	for _, v := range j.headers { | ||||||
| 		g.Header(v.Key, v.Val) | 		g.Header(v.Key, v.Val) | ||||||
| 	} | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	g.String(j.statusCode, "%s", j.data) | 	g.String(j.statusCode, "%s", j.data) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -134,6 +167,11 @@ func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j textHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
| func (j textHTTPResponse) IsSuccess() bool { | func (j textHTTPResponse) IsSuccess() bool { | ||||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
| } | } | ||||||
| @@ -159,12 +197,16 @@ type dataHTTPResponse struct { | |||||||
| 	data        []byte | 	data        []byte | ||||||
| 	contentType string | 	contentType string | ||||||
| 	headers     []headerval | 	headers     []headerval | ||||||
|  | 	cookies     []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j dataHTTPResponse) Write(g *gin.Context) { | func (j dataHTTPResponse) Write(g *gin.Context) { | ||||||
| 	for _, v := range j.headers { | 	for _, v := range j.headers { | ||||||
| 		g.Header(v.Key, v.Val) | 		g.Header(v.Key, v.Val) | ||||||
| 	} | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	g.Data(j.statusCode, j.contentType, j.data) | 	g.Data(j.statusCode, j.contentType, j.data) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -173,6 +215,11 @@ func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j dataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
| func (j dataHTTPResponse) IsSuccess() bool { | func (j dataHTTPResponse) IsSuccess() bool { | ||||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
| } | } | ||||||
| @@ -198,6 +245,7 @@ type fileHTTPResponse struct { | |||||||
| 	filepath string | 	filepath string | ||||||
| 	filename *string | 	filename *string | ||||||
| 	headers  []headerval | 	headers  []headerval | ||||||
|  | 	cookies  []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j fileHTTPResponse) Write(g *gin.Context) { | func (j fileHTTPResponse) Write(g *gin.Context) { | ||||||
| @@ -209,6 +257,9 @@ func (j fileHTTPResponse) Write(g *gin.Context) { | |||||||
| 	for _, v := range j.headers { | 	for _, v := range j.headers { | ||||||
| 		g.Header(v.Key, v.Val) | 		g.Header(v.Key, v.Val) | ||||||
| 	} | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	g.File(j.filepath) | 	g.File(j.filepath) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -217,6 +268,11 @@ func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j fileHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
| func (j fileHTTPResponse) IsSuccess() bool { | func (j fileHTTPResponse) IsSuccess() bool { | ||||||
| 	return true | 	return true | ||||||
| } | } | ||||||
| @@ -247,17 +303,20 @@ type downloadDataHTTPResponse struct { | |||||||
| 	data       []byte | 	data       []byte | ||||||
| 	filename   *string | 	filename   *string | ||||||
| 	headers    []headerval | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j downloadDataHTTPResponse) Write(g *gin.Context) { | func (j downloadDataHTTPResponse) Write(g *gin.Context) { | ||||||
| 	g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later... | 	g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later... | ||||||
| 	if j.filename != nil { | 	if j.filename != nil { | ||||||
| 		g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename)) | 		g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename)) | ||||||
|  |  | ||||||
| 	} | 	} | ||||||
| 	for _, v := range j.headers { | 	for _, v := range j.headers { | ||||||
| 		g.Header(v.Key, v.Val) | 		g.Header(v.Key, v.Val) | ||||||
| 	} | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	g.Data(j.statusCode, j.mimetype, j.data) | 	g.Data(j.statusCode, j.mimetype, j.data) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -266,6 +325,11 @@ func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j downloadDataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
| func (j downloadDataHTTPResponse) IsSuccess() bool { | func (j downloadDataHTTPResponse) IsSuccess() bool { | ||||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
| } | } | ||||||
| @@ -290,9 +354,16 @@ type redirectHTTPResponse struct { | |||||||
| 	statusCode int | 	statusCode int | ||||||
| 	url        string | 	url        string | ||||||
| 	headers    []headerval | 	headers    []headerval | ||||||
|  | 	cookies    []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j redirectHTTPResponse) Write(g *gin.Context) { | func (j redirectHTTPResponse) Write(g *gin.Context) { | ||||||
|  | 	for _, v := range j.headers { | ||||||
|  | 		g.Header(v.Key, v.Val) | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	g.Redirect(j.statusCode, j.url) | 	g.Redirect(j.statusCode, j.url) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -301,6 +372,11 @@ func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j redirectHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
| func (j redirectHTTPResponse) IsSuccess() bool { | func (j redirectHTTPResponse) IsSuccess() bool { | ||||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||||
| } | } | ||||||
| @@ -324,10 +400,19 @@ func (j redirectHTTPResponse) Headers() []string { | |||||||
| type jsonAPIErrResponse struct { | type jsonAPIErrResponse struct { | ||||||
| 	err     *exerr.ExErr | 	err     *exerr.ExErr | ||||||
| 	headers []headerval | 	headers []headerval | ||||||
|  | 	cookies []cookieval | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j jsonAPIErrResponse) Write(g *gin.Context) { | func (j jsonAPIErrResponse) Write(g *gin.Context) { | ||||||
|  | 	for _, v := range j.headers { | ||||||
|  | 		g.Header(v.Key, v.Val) | ||||||
|  | 	} | ||||||
|  | 	for _, v := range j.cookies { | ||||||
|  | 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||||
|  | 	} | ||||||
| 	j.err.Output(g) | 	j.err.Output(g) | ||||||
|  |  | ||||||
|  | 	j.err.CallListener(exerr.MethodOutput) | ||||||
| } | } | ||||||
|  |  | ||||||
| func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse { | func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse { | ||||||
| @@ -335,6 +420,11 @@ func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse { | |||||||
| 	return j | 	return j | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func (j jsonAPIErrResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||||
|  | 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||||
|  | 	return j | ||||||
|  | } | ||||||
|  |  | ||||||
| func (j jsonAPIErrResponse) IsSuccess() bool { | func (j jsonAPIErrResponse) IsSuccess() bool { | ||||||
| 	return false | 	return false | ||||||
| } | } | ||||||
|   | |||||||
| @@ -6,6 +6,6 @@ import ( | |||||||
| ) | ) | ||||||
|  |  | ||||||
| type SessionObject interface { | type SessionObject interface { | ||||||
| 	Init(g *gin.Context, ctx context.Context) error | 	Init(g *gin.Context, ctx *AppContext) error | ||||||
| 	Finish(ctx context.Context, resp HTTPResponse) error | 	Finish(ctx context.Context, resp HTTPResponse) error | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										8
									
								
								go.mod
									
									
									
									
									
								
							
							
						
						
									
										8
									
								
								go.mod
									
									
									
									
									
								
							| @@ -4,14 +4,14 @@ go 1.21 | |||||||
|  |  | ||||||
| require ( | require ( | ||||||
| 	github.com/gin-gonic/gin v1.9.1 | 	github.com/gin-gonic/gin v1.9.1 | ||||||
|  | 	github.com/glebarez/go-sqlite v1.22.0 // only needed for tests -.- | ||||||
| 	github.com/jmoiron/sqlx v1.3.5 | 	github.com/jmoiron/sqlx v1.3.5 | ||||||
| 	github.com/rs/xid v1.5.0 | 	github.com/rs/xid v1.5.0 | ||||||
| 	github.com/rs/zerolog v1.31.0 | 	github.com/rs/zerolog v1.31.0 | ||||||
| 	go.mongodb.org/mongo-driver v1.13.1 | 	go.mongodb.org/mongo-driver v1.13.1 | ||||||
| 	golang.org/x/crypto v0.17.0 | 	golang.org/x/crypto v0.18.0 | ||||||
| 	golang.org/x/sys v0.16.0 | 	golang.org/x/sys v0.16.0 | ||||||
| 	golang.org/x/term v0.16.0 | 	golang.org/x/term v0.16.0 | ||||||
| 	github.com/glebarez/go-sqlite  v1.22.0 // only needed for tests -.- |  | ||||||
| ) | ) | ||||||
|  |  | ||||||
| require ( | require ( | ||||||
| @@ -23,7 +23,7 @@ require ( | |||||||
| 	github.com/gin-contrib/sse v0.1.0 // indirect | 	github.com/gin-contrib/sse v0.1.0 // indirect | ||||||
| 	github.com/go-playground/locales v0.14.1 // indirect | 	github.com/go-playground/locales v0.14.1 // indirect | ||||||
| 	github.com/go-playground/universal-translator v0.18.1 // indirect | 	github.com/go-playground/universal-translator v0.18.1 // indirect | ||||||
| 	github.com/go-playground/validator/v10 v10.16.0 // indirect | 	github.com/go-playground/validator/v10 v10.17.0 // indirect | ||||||
| 	github.com/goccy/go-json v0.10.2 // indirect | 	github.com/goccy/go-json v0.10.2 // indirect | ||||||
| 	github.com/golang/snappy v0.0.4 // indirect | 	github.com/golang/snappy v0.0.4 // indirect | ||||||
| 	github.com/google/uuid v1.5.0 // indirect | 	github.com/google/uuid v1.5.0 // indirect | ||||||
| @@ -45,7 +45,7 @@ require ( | |||||||
| 	github.com/xdg-go/stringprep v1.0.4 // indirect | 	github.com/xdg-go/stringprep v1.0.4 // indirect | ||||||
| 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect | 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect | ||||||
| 	golang.org/x/arch v0.7.0 // indirect | 	golang.org/x/arch v0.7.0 // indirect | ||||||
| 	golang.org/x/net v0.19.0 // indirect | 	golang.org/x/net v0.20.0 // indirect | ||||||
| 	golang.org/x/sync v0.6.0 // indirect | 	golang.org/x/sync v0.6.0 // indirect | ||||||
| 	golang.org/x/text v0.14.0 // indirect | 	golang.org/x/text v0.14.0 // indirect | ||||||
| 	google.golang.org/protobuf v1.32.0 // indirect | 	google.golang.org/protobuf v1.32.0 // indirect | ||||||
|   | |||||||
							
								
								
									
										7
									
								
								go.sum
									
									
									
									
									
								
							
							
						
						
									
										7
									
								
								go.sum
									
									
									
									
									
								
							| @@ -31,11 +31,14 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn | |||||||
| github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= | github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= | ||||||
| github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE= | github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE= | ||||||
| github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||||
|  | github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74= | ||||||
|  | github.com/go-playground/validator/v10 v10.17.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||||
| github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | ||||||
| github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | ||||||
| github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | ||||||
| github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= | github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= | ||||||
| github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= | github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= | ||||||
|  | github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= | ||||||
| github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | ||||||
| github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= | github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= | ||||||
| github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= | ||||||
| @@ -126,6 +129,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y | |||||||
| golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= | golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= | ||||||
| golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= | golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= | ||||||
| golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= | golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= | ||||||
|  | golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= | ||||||
|  | golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= | ||||||
| golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= | ||||||
| golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= | ||||||
| golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= | ||||||
| @@ -134,6 +139,8 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx | |||||||
| golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= | golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= | ||||||
| golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= | golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= | ||||||
| golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= | golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= | ||||||
|  | golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= | ||||||
|  | golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= | ||||||
| golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||||
| golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | ||||||
| golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= | golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| package goext | package goext | ||||||
|  |  | ||||||
| const GoextVersion = "0.0.360" | const GoextVersion = "0.0.379" | ||||||
|  |  | ||||||
| const GoextVersionTimestamp = "2024-01-06T01:31:07+0100" | const GoextVersionTimestamp = "2024-01-19T17:30:20+0100" | ||||||
|   | |||||||
							
								
								
									
										21
									
								
								langext/must.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										21
									
								
								langext/must.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,21 @@ | |||||||
|  | package langext | ||||||
|  |  | ||||||
|  | // Must returns a value and panics on error | ||||||
|  | // | ||||||
|  | // Usage: Must(methodWithError(...)) | ||||||
|  | func Must[T any](v T, err error) T { | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err) | ||||||
|  | 	} | ||||||
|  | 	return v | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // MustBool returns a value and panics on missing | ||||||
|  | // | ||||||
|  | // Usage: MustBool(methodWithOkayReturn(...)) | ||||||
|  | func MustBool[T any](v T, ok bool) T { | ||||||
|  | 	if !ok { | ||||||
|  | 		panic("not ok") | ||||||
|  | 	} | ||||||
|  | 	return v | ||||||
|  | } | ||||||
| @@ -5,7 +5,7 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type Filter interface { | type MongoFilter interface { | ||||||
| 	FilterQuery() mongo.Pipeline | 	FilterQuery() mongo.Pipeline | ||||||
| 	Sort() bson.D | 	Sort() bson.D | ||||||
| } | } | ||||||
| @@ -23,6 +23,6 @@ func (d dynamicFilter) Sort() bson.D { | |||||||
| 	return d.sort | 	return d.sort | ||||||
| } | } | ||||||
|  |  | ||||||
| func CreateFilter(pipeline mongo.Pipeline, sort bson.D) Filter { | func CreateFilter(pipeline mongo.Pipeline, sort bson.D) MongoFilter { | ||||||
| 	return dynamicFilter{pipeline: pipeline, sort: sort} | 	return dynamicFilter{pipeline: pipeline, sort: sort} | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										98
									
								
								reflectext/mapAccess.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										98
									
								
								reflectext/mapAccess.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,98 @@ | |||||||
|  | package reflectext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"reflect" | ||||||
|  | 	"strings" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | // GetMapPath returns the value deep inside a hierahically nested map structure | ||||||
|  | // eg: | ||||||
|  | // x := langext.H{"K1": langext.H{"K2": 665}} | ||||||
|  | // GetMapPath[int](x, "K1.K2") == 665 | ||||||
|  | func GetMapPath[TData any](mapval any, path string) (TData, bool) { | ||||||
|  | 	var ok bool | ||||||
|  |  | ||||||
|  | 	split := strings.Split(path, ".") | ||||||
|  |  | ||||||
|  | 	for i, key := range split { | ||||||
|  |  | ||||||
|  | 		if i < len(split)-1 { | ||||||
|  | 			mapval, ok = GetMapField[any](mapval, key) | ||||||
|  | 			if !ok { | ||||||
|  | 				return *new(TData), false | ||||||
|  | 			} | ||||||
|  | 		} else { | ||||||
|  | 			return GetMapField[TData](mapval, key) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return *new(TData), false | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // GetMapField gets the value of a map, without knowing the actual types (mapval is any) | ||||||
|  | // eg: | ||||||
|  | // x := langext.H{"K1": 665} | ||||||
|  | // GetMapPath[int](x, "K1") == 665 | ||||||
|  | // | ||||||
|  | // works with aliased types and autom. dereferences pointes | ||||||
|  | func GetMapField[TData any, TKey comparable](mapval any, key TKey) (TData, bool) { | ||||||
|  |  | ||||||
|  | 	rval := reflect.ValueOf(mapval) | ||||||
|  |  | ||||||
|  | 	for rval.Kind() == reflect.Ptr && !rval.IsNil() { | ||||||
|  | 		rval = rval.Elem() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if rval.Kind() != reflect.Map { | ||||||
|  | 		return *new(TData), false // mapval is not a map | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	kval := reflect.ValueOf(key) | ||||||
|  |  | ||||||
|  | 	if !kval.Type().AssignableTo(rval.Type().Key()) { | ||||||
|  | 		return *new(TData), false // key cannot index mapval | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	eval := rval.MapIndex(kval) | ||||||
|  | 	if !eval.IsValid() { | ||||||
|  | 		return *new(TData), false // key does not exist in mapval | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	destType := reflect.TypeOf(new(TData)).Elem() | ||||||
|  |  | ||||||
|  | 	if eval.Type() == destType { | ||||||
|  | 		return eval.Interface().(TData), true | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if eval.CanConvert(destType) && !preventConvert(eval.Type(), destType) { | ||||||
|  | 		return eval.Convert(destType).Interface().(TData), true | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if (eval.Kind() == reflect.Ptr || eval.Kind() == reflect.Interface) && eval.IsNil() && destType.Kind() == reflect.Ptr { | ||||||
|  | 		return *new(TData), false // special case: mapval[key] is nil | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	for (eval.Kind() == reflect.Ptr || eval.Kind() == reflect.Interface) && !eval.IsNil() { | ||||||
|  | 		eval = eval.Elem() | ||||||
|  |  | ||||||
|  | 		if eval.Type() == destType { | ||||||
|  | 			return eval.Interface().(TData), true | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if eval.CanConvert(destType) && !preventConvert(eval.Type(), destType) { | ||||||
|  | 			return eval.Convert(destType).Interface().(TData), true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return *new(TData), false // mapval[key] is not of type TData | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func preventConvert(t1 reflect.Type, t2 reflect.Type) bool { | ||||||
|  | 	if t1.Kind() == reflect.String && t1.Kind() != reflect.String { | ||||||
|  | 		return true | ||||||
|  | 	} | ||||||
|  | 	if t2.Kind() == reflect.String && t1.Kind() != reflect.String { | ||||||
|  | 		return true | ||||||
|  | 	} | ||||||
|  | 	return false | ||||||
|  | } | ||||||
							
								
								
									
										55
									
								
								reflectext/mapAccess_test.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										55
									
								
								reflectext/mapAccess_test.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,55 @@ | |||||||
|  | package reflectext | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||||
|  | 	"testing" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func TestGetMapPath(t *testing.T) { | ||||||
|  | 	type PseudoInt = int64 | ||||||
|  |  | ||||||
|  | 	mymap2 := map[string]map[string]any{"Test": {"Second": 3}} | ||||||
|  |  | ||||||
|  | 	var maany2 any = mymap2 | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[int](maany2, "Test.Second")), "3 true") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[int](maany2, "Test2.Second")), "0 false") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[int](maany2, "Test.Second2")), "0 false") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[string](maany2, "Test.Second")), "false") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[string](maany2, "Test2.Second")), "false") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[string](maany2, "Test.Second2")), "false") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[PseudoInt](maany2, "Test.Second")), "3 true") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[PseudoInt](maany2, "Test2.Second")), "0 false") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[PseudoInt](maany2, "Test.Second2")), "0 false") | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func TestGetMapField(t *testing.T) { | ||||||
|  | 	type PseudoInt = int64 | ||||||
|  |  | ||||||
|  | 	mymap1 := map[string]any{"Test": 12} | ||||||
|  | 	mymap2 := map[string]int{"Test": 12} | ||||||
|  |  | ||||||
|  | 	var maany1 any = mymap1 | ||||||
|  | 	var maany2 any = mymap2 | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany1, "Test")), "12 true") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany1, "Test2")), "0 false") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany1, "Test")), "false") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany1, "Test2")), "false") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany1, "Test")), "12 true") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany1, "Test2")), "0 false") | ||||||
|  |  | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany2, "Test")), "12 true") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany2, "Test2")), "0 false") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany2, "Test")), "false") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany2, "Test2")), "false") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test")), "12 true") | ||||||
|  | 	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test2")), "0 false") | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func main2() { | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func main() { | ||||||
|  | } | ||||||
| @@ -53,7 +53,7 @@ func BuildUpdateStatement(q Queryable, tableName string, obj any, idColumn strin | |||||||
| 				return "", nil, err | 				return "", nil, err | ||||||
| 			} | 			} | ||||||
|  |  | ||||||
| 			setClauses = append(setClauses, fmt.Sprintf("(%s = :%s)", columnName, params.Add(val))) | 			setClauses = append(setClauses, fmt.Sprintf("%s = :%s", columnName, params.Add(val))) | ||||||
|  |  | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| @@ -69,3 +69,52 @@ func BuildUpdateStatement(q Queryable, tableName string, obj any, idColumn strin | |||||||
| 	//goland:noinspection SqlNoDataSourceInspection | 	//goland:noinspection SqlNoDataSourceInspection | ||||||
| 	return fmt.Sprintf("UPDATE %s SET %s WHERE %s", tableName, strings.Join(setClauses, ", "), matchClause), params, nil | 	return fmt.Sprintf("UPDATE %s SET %s WHERE %s", tableName, strings.Join(setClauses, ", "), matchClause), params, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func BuildInsertStatement(q Queryable, tableName string, obj any) (string, PP, error) { | ||||||
|  | 	rval := reflect.ValueOf(obj) | ||||||
|  | 	rtyp := rval.Type() | ||||||
|  |  | ||||||
|  | 	params := PP{} | ||||||
|  |  | ||||||
|  | 	fields := make([]string, 0) | ||||||
|  | 	values := make([]string, 0) | ||||||
|  |  | ||||||
|  | 	for i := 0; i < rtyp.NumField(); i++ { | ||||||
|  |  | ||||||
|  | 		rsfield := rtyp.Field(i) | ||||||
|  | 		rvfield := rval.Field(i) | ||||||
|  |  | ||||||
|  | 		if !rsfield.IsExported() { | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		columnName := rsfield.Tag.Get("db") | ||||||
|  | 		if columnName == "" || columnName == "-" { | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if rsfield.Type.Kind() == reflect.Ptr && rvfield.IsNil() { | ||||||
|  |  | ||||||
|  | 			fields = append(fields, columnName) | ||||||
|  | 			values = append(values, "NULL") | ||||||
|  |  | ||||||
|  | 		} else { | ||||||
|  |  | ||||||
|  | 			val, err := convertValueToDB(q, rvfield.Interface()) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return "", nil, err | ||||||
|  | 			} | ||||||
|  |  | ||||||
|  | 			fields = append(fields, columnName) | ||||||
|  | 			values = append(values, ":"+params.Add(val)) | ||||||
|  |  | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if len(fields) == 0 { | ||||||
|  | 		return "", nil, exerr.New(exerr.TypeSQLBuild, "no fields found in object").Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	//goland:noinspection SqlNoDataSourceInspection | ||||||
|  | 	return fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", tableName, strings.Join(fields, ", "), strings.Join(values, ", ")), params, nil | ||||||
|  | } | ||||||
|   | |||||||
| @@ -4,6 +4,7 @@ import ( | |||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
| 	"errors" | 	"errors" | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime" | ||||||
| 	"reflect" | 	"reflect" | ||||||
| @@ -17,9 +18,9 @@ type DBTypeConverter interface { | |||||||
| 	DBToModel(v any) (any, error) | 	DBToModel(v any) (any, error) | ||||||
| } | } | ||||||
|  |  | ||||||
| var ConverterBoolToBit = NewDBTypeConverter[bool, int](func(v bool) (int, error) { | var ConverterBoolToBit = NewDBTypeConverter[bool, int64](func(v bool) (int64, error) { | ||||||
| 	return langext.Conditional(v, 1, 0), nil | 	return langext.Conditional(v, int64(1), int64(0)), nil | ||||||
| }, func(v int) (bool, error) { | }, func(v int64) (bool, error) { | ||||||
| 	if v == 0 { | 	if v == 0 { | ||||||
| 		return false, nil | 		return false, nil | ||||||
| 	} | 	} | ||||||
| @@ -105,6 +106,40 @@ var ConverterJsonArrToString = NewDBTypeConverter[JsonArr, string](func(v JsonAr | |||||||
| 	return mrsh, nil | 	return mrsh, nil | ||||||
| }) | }) | ||||||
|  |  | ||||||
|  | var ConverterExErrCategoryToString = NewDBTypeConverter[exerr.ErrorCategory, string](func(v exerr.ErrorCategory) (string, error) { | ||||||
|  | 	return v.Category, nil | ||||||
|  | }, func(v string) (exerr.ErrorCategory, error) { | ||||||
|  | 	for _, cat := range exerr.AllCategories { | ||||||
|  | 		if cat.Category == v { | ||||||
|  | 			return cat, nil | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return exerr.CatUser, errors.New("failed to convert '" + v + "' to exerr.ErrorCategory") | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterExErrSeverityToString = NewDBTypeConverter[exerr.ErrorSeverity, string](func(v exerr.ErrorSeverity) (string, error) { | ||||||
|  | 	return v.Severity, nil | ||||||
|  | }, func(v string) (exerr.ErrorSeverity, error) { | ||||||
|  | 	for _, sev := range exerr.AllSeverities { | ||||||
|  | 		if sev.Severity == v { | ||||||
|  | 			return sev, nil | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return exerr.SevErr, errors.New("failed to convert '" + v + "' to exerr.ErrorSeverity") | ||||||
|  | }) | ||||||
|  |  | ||||||
|  | var ConverterExErrTypeToString = NewDBTypeConverter[exerr.ErrorType, string](func(v exerr.ErrorType) (string, error) { | ||||||
|  | 	return v.Key, nil | ||||||
|  | }, func(v string) (exerr.ErrorType, error) { | ||||||
|  | 	for _, etp := range exerr.ListRegisteredTypes() { | ||||||
|  | 		if etp.Key == v { | ||||||
|  | 			return etp, nil | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return exerr.NewType(v, nil), nil | ||||||
|  | }) | ||||||
|  |  | ||||||
| type dbTypeConverterImpl[TModelData any, TDBData any] struct { | type dbTypeConverterImpl[TModelData any, TDBData any] struct { | ||||||
| 	dbTypeString    string | 	dbTypeString    string | ||||||
| 	modelTypeString string | 	modelTypeString string | ||||||
|   | |||||||
| @@ -4,6 +4,7 @@ import ( | |||||||
| 	"context" | 	"context" | ||||||
| 	"database/sql" | 	"database/sql" | ||||||
| 	"github.com/jmoiron/sqlx" | 	"github.com/jmoiron/sqlx" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"sync" | 	"sync" | ||||||
| ) | ) | ||||||
| @@ -45,7 +46,7 @@ func (db *database) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Resul | |||||||
| 	for _, v := range db.lstr { | 	for _, v := range db.lstr { | ||||||
| 		err := v.PreExec(ctx, nil, &sqlstr, &prep) | 		err := v.PreExec(ctx, nil, &sqlstr, &prep) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, err | 			return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -56,7 +57,7 @@ func (db *database) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Resul | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, exerr.Wrap(err, "Failed to [exec] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 	} | 	} | ||||||
| 	return res, nil | 	return res, nil | ||||||
| } | } | ||||||
| @@ -66,7 +67,7 @@ func (db *database) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Ro | |||||||
| 	for _, v := range db.lstr { | 	for _, v := range db.lstr { | ||||||
| 		err := v.PreQuery(ctx, nil, &sqlstr, &prep) | 		err := v.PreQuery(ctx, nil, &sqlstr, &prep) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, err | 			return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -77,7 +78,7 @@ func (db *database) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Ro | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, exerr.Wrap(err, "Failed to [query] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 	} | 	} | ||||||
| 	return rows, nil | 	return rows, nil | ||||||
| } | } | ||||||
| @@ -97,7 +98,7 @@ func (db *database) Ping(ctx context.Context) error { | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return exerr.Wrap(err, "Failed to [ping] sql database").Build() | ||||||
| 	} | 	} | ||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
| @@ -117,7 +118,7 @@ func (db *database) BeginTransaction(ctx context.Context, iso sql.IsolationLevel | |||||||
|  |  | ||||||
| 	xtx, err := db.db.BeginTxx(ctx, &sql.TxOptions{Isolation: iso}) | 	xtx, err := db.db.BeginTxx(ctx, &sql.TxOptions{Isolation: iso}) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, exerr.Wrap(err, "Failed to start sql transaction").Build() | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	for _, v := range db.lstr { | 	for _, v := range db.lstr { | ||||||
| @@ -150,4 +151,7 @@ func (db *database) RegisterDefaultConverter() { | |||||||
| 	db.RegisterConverter(ConverterRFC339NanoTimeToString) | 	db.RegisterConverter(ConverterRFC339NanoTimeToString) | ||||||
| 	db.RegisterConverter(ConverterJsonObjToString) | 	db.RegisterConverter(ConverterJsonObjToString) | ||||||
| 	db.RegisterConverter(ConverterJsonArrToString) | 	db.RegisterConverter(ConverterJsonArrToString) | ||||||
|  | 	db.RegisterConverter(ConverterExErrCategoryToString) | ||||||
|  | 	db.RegisterConverter(ConverterExErrSeverityToString) | ||||||
|  | 	db.RegisterConverter(ConverterExErrTypeToString) | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										126
									
								
								sq/paginate.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										126
									
								
								sq/paginate.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,126 @@ | |||||||
|  | package sq | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"context" | ||||||
|  | 	"fmt" | ||||||
|  | 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
|  | 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | ||||||
|  | ) | ||||||
|  |  | ||||||
// PaginateFilter is the filter specification consumed by Paginate and Count.
//
// SQL returns the WHERE condition (filterClause), an optional JOIN clause and
// the names of the joined tables (whose columns get added to the SELECT list);
// any parameters it needs are added to the supplied PP map.
// Sort returns the requested ORDER BY entries.
type PaginateFilter interface {
	SQL(params PP) (filterClause string, joinClause string, joinTables []string)
	Sort() []FilterSort
}

// FilterSort describes a single ORDER BY entry (column name + sort direction).
type FilterSort struct {
	Field     string
	Direction ct.SortDirection
}
|  |  | ||||||
|  | func Paginate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||||
|  | 	prepParams := PP{} | ||||||
|  |  | ||||||
|  | 	sortOrder := filter.Sort() | ||||||
|  | 	sortCond := "" | ||||||
|  | 	if len(sortOrder) > 0 { | ||||||
|  | 		sortCond = "ORDER BY " | ||||||
|  | 		for i, v := range sortOrder { | ||||||
|  | 			if i > 0 { | ||||||
|  | 				sortCond += ", " | ||||||
|  | 			} | ||||||
|  | 			sortCond += v.Field + " " + string(v.Direction) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pageCond := "" | ||||||
|  | 	if limit != nil { | ||||||
|  | 		pageCond += fmt.Sprintf("LIMIT :%s OFFSET :%s", prepParams.Add(*limit+1), prepParams.Add(*limit*(page-1))) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	filterCond, joinCond, joinTables := filter.SQL(prepParams) | ||||||
|  |  | ||||||
|  | 	selectCond := table + ".*" | ||||||
|  | 	for _, v := range joinTables { | ||||||
|  | 		selectCond += ", " + v + ".*" | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	sqlQueryData := "SELECT " + selectCond + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " ) " + sortCond + " " + pageCond | ||||||
|  | 	sqlQueryCount := "SELECT " + "COUNT(*)" + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " ) " | ||||||
|  |  | ||||||
|  | 	rows, err := q.Query(ctx, sqlQueryData, prepParams) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, pag.Pagination{}, exerr.Wrap(err, "failed to list paginated entries from DB").Str("table", table).Any("filter", filter).Int("page", page).Any("limit", limit).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	entities, err := ScanAll[TData](ctx, q, rows, scanMode, scanSec, true) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode paginated entries from DB").Str("table", table).Int("page", page).Any("limit", limit).Str("scanMode", string(scanMode)).Str("scanSec", string(scanSec)).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if page == 1 && (limit == nil || len(entities) <= *limit) { | ||||||
|  | 		return entities, pag.Pagination{ | ||||||
|  | 			Page:             1, | ||||||
|  | 			Limit:            langext.Coalesce(limit, len(entities)), | ||||||
|  | 			TotalPages:       1, | ||||||
|  | 			TotalItems:       len(entities), | ||||||
|  | 			CurrentPageCount: 1, | ||||||
|  | 		}, nil | ||||||
|  | 	} else { | ||||||
|  |  | ||||||
|  | 		countRows, err := q.Query(ctx, sqlQueryCount, prepParams) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, pag.Pagination{}, exerr.Wrap(err, "failed to query total-count of paginated entries from DB").Str("table", table).Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if !countRows.Next() { | ||||||
|  | 			return nil, pag.Pagination{}, exerr.New(exerr.TypeSQLDecode, "SQL COUNT(*) query returned no rows").Str("table", table).Any("filter", filter).Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		var countRes int | ||||||
|  | 		err = countRows.Scan(&countRes) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode total-count of paginated entries from DB").Str("table", table).Build() | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		if len(entities) > *limit { | ||||||
|  | 			entities = entities[:*limit] | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		paginationObj := pag.Pagination{ | ||||||
|  | 			Page:             page, | ||||||
|  | 			Limit:            langext.Coalesce(limit, countRes), | ||||||
|  | 			TotalPages:       pag.CalcPaginationTotalPages(countRes, langext.Coalesce(limit, countRes)), | ||||||
|  | 			TotalItems:       countRes, | ||||||
|  | 			CurrentPageCount: len(entities), | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		return entities, paginationObj, nil | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func Count(ctx context.Context, q Queryable, table string, filter PaginateFilter) (int, error) { | ||||||
|  | 	prepParams := PP{} | ||||||
|  |  | ||||||
|  | 	filterCond, joinCond, _ := filter.SQL(prepParams) | ||||||
|  |  | ||||||
|  | 	sqlQueryCount := "SELECT " + "COUNT(*)" + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " )" | ||||||
|  |  | ||||||
|  | 	countRows, err := q.Query(ctx, sqlQueryCount, prepParams) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return 0, exerr.Wrap(err, "failed to query count of entries from DB").Str("table", table).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	if !countRows.Next() { | ||||||
|  | 		return 0, exerr.New(exerr.TypeSQLDecode, "SQL COUNT(*) query returned no rows").Str("table", table).Any("filter", filter).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	var countRes int | ||||||
|  | 	err = countRows.Scan(&countRes) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return 0, exerr.Wrap(err, "failed to decode count of entries from DB").Str("table", table).Build() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return countRes, nil | ||||||
|  | } | ||||||
| @@ -93,3 +93,62 @@ func TestTypeConverter2(t *testing.T) { | |||||||
| 	tst.AssertEqual(t, "002", r.ID) | 	tst.AssertEqual(t, "002", r.ID) | ||||||
| 	tst.AssertEqual(t, t0.UnixNano(), r.Timestamp.UnixNano()) | 	tst.AssertEqual(t, t0.UnixNano(), r.Timestamp.UnixNano()) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func TestTypeConverter3(t *testing.T) { | ||||||
|  |  | ||||||
|  | 	if !langext.InArray("sqlite3", sql.Drivers()) { | ||||||
|  | 		sqlite.RegisterAsSQLITE3() | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	type RequestData struct { | ||||||
|  | 		ID        string                 `db:"id"` | ||||||
|  | 		Timestamp *rfctime.UnixMilliTime `db:"timestamp"` | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	ctx := context.Background() | ||||||
|  |  | ||||||
|  | 	dbdir := t.TempDir() | ||||||
|  | 	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3") | ||||||
|  |  | ||||||
|  | 	tst.AssertNoErr(t, os.MkdirAll(dbdir, os.ModePerm)) | ||||||
|  |  | ||||||
|  | 	url := fmt.Sprintf("file:%s?_pragma=journal_mode(%s)&_pragma=timeout(%d)&_pragma=foreign_keys(%s)&_pragma=busy_timeout(%d)", dbfile1, "DELETE", 1000, "true", 1000) | ||||||
|  |  | ||||||
|  | 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||||
|  |  | ||||||
|  | 	db := NewDB(xdb) | ||||||
|  | 	db.RegisterDefaultConverter() | ||||||
|  |  | ||||||
|  | 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	t0 := rfctime.NewUnixMilli(time.Date(2012, 03, 01, 16, 0, 0, 0, time.UTC)) | ||||||
|  |  | ||||||
|  | 	_, err = InsertSingle(ctx, db, "requests", RequestData{ | ||||||
|  | 		ID:        "001", | ||||||
|  | 		Timestamp: &t0, | ||||||
|  | 	}) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	_, err = InsertSingle(ctx, db, "requests", RequestData{ | ||||||
|  | 		ID:        "002", | ||||||
|  | 		Timestamp: nil, | ||||||
|  | 	}) | ||||||
|  | 	tst.AssertNoErr(t, err) | ||||||
|  |  | ||||||
|  | 	{ | ||||||
|  | 		r1, err := QuerySingle[RequestData](ctx, db, "SELECT * FROM requests WHERE id = '001'", PP{}, SModeExtended, Safe) | ||||||
|  | 		tst.AssertNoErr(t, err) | ||||||
|  | 		fmt.Printf("%+v\n", r1) | ||||||
|  | 		tst.AssertEqual(t, "001", r1.ID) | ||||||
|  | 		tst.AssertEqual(t, t0.UnixNano(), r1.Timestamp.UnixNano()) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	{ | ||||||
|  | 		r2, err := QuerySingle[RequestData](ctx, db, "SELECT * FROM requests WHERE id = '002'", PP{}, SModeExtended, Safe) | ||||||
|  | 		tst.AssertNoErr(t, err) | ||||||
|  | 		fmt.Printf("%+v\n", r2) | ||||||
|  | 		tst.AssertEqual(t, "002", r2.ID) | ||||||
|  | 		tst.AssertEqual(t, nil, r2.Timestamp) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|   | |||||||
| @@ -7,6 +7,7 @@ import ( | |||||||
| 	"github.com/jmoiron/sqlx/reflectx" | 	"github.com/jmoiron/sqlx/reflectx" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| 	"reflect" | 	"reflect" | ||||||
|  | 	"strings" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| // forked from sqlx, but added ability to unmarshal optional-nested structs | // forked from sqlx, but added ability to unmarshal optional-nested structs | ||||||
| @@ -18,7 +19,7 @@ type StructScanner struct { | |||||||
|  |  | ||||||
| 	fields    [][]int | 	fields    [][]int | ||||||
| 	values    []any | 	values    []any | ||||||
| 	converter []DBTypeConverter | 	converter []ssConverter | ||||||
| 	columns   []string | 	columns   []string | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -30,6 +31,11 @@ func NewStructScanner(rows *sqlx.Rows, unsafe bool) *StructScanner { | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type ssConverter struct { | ||||||
|  | 	Converter DBTypeConverter | ||||||
|  | 	RefCount  int | ||||||
|  | } | ||||||
|  |  | ||||||
| func (r *StructScanner) Start(dest any) error { | func (r *StructScanner) Start(dest any) error { | ||||||
| 	v := reflect.ValueOf(dest) | 	v := reflect.ValueOf(dest) | ||||||
|  |  | ||||||
| @@ -49,7 +55,7 @@ func (r *StructScanner) Start(dest any) error { | |||||||
| 		return fmt.Errorf("missing destination name %s in %T", columns[f], dest) | 		return fmt.Errorf("missing destination name %s in %T", columns[f], dest) | ||||||
| 	} | 	} | ||||||
| 	r.values = make([]interface{}, len(columns)) | 	r.values = make([]interface{}, len(columns)) | ||||||
| 	r.converter = make([]DBTypeConverter, len(columns)) | 	r.converter = make([]ssConverter, len(columns)) | ||||||
|  |  | ||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
| @@ -143,13 +149,19 @@ func (r *StructScanner) StructScanExt(q Queryable, dest any) error { | |||||||
|  |  | ||||||
| 			f.Set(reflect.Zero(f.Type())) // set to nil | 			f.Set(reflect.Zero(f.Type())) // set to nil | ||||||
| 		} else { | 		} else { | ||||||
| 			if r.converter[i] != nil { | 			if r.converter[i].Converter != nil { | ||||||
| 				val3 := val2.Elem().Interface() | 				val3 := val2.Elem() | ||||||
| 				conv3, err := r.converter[i].DBToModel(val3) | 				conv3, err := r.converter[i].Converter.DBToModel(val3.Interface()) | ||||||
| 				if err != nil { | 				if err != nil { | ||||||
| 					return err | 					return err | ||||||
| 				} | 				} | ||||||
| 				f.Set(reflect.ValueOf(conv3)) | 				conv3RVal := reflect.ValueOf(conv3) | ||||||
|  | 				for j := 0; j < r.converter[i].RefCount; j++ { | ||||||
|  | 					newConv3Val := reflect.New(conv3RVal.Type()) | ||||||
|  | 					newConv3Val.Elem().Set(conv3RVal) | ||||||
|  | 					conv3RVal = newConv3Val | ||||||
|  | 				} | ||||||
|  | 				f.Set(conv3RVal) | ||||||
| 			} else { | 			} else { | ||||||
| 				f.Set(val2.Elem()) | 				f.Set(val2.Elem()) | ||||||
| 			} | 			} | ||||||
| @@ -184,7 +196,7 @@ func (r *StructScanner) StructScanBase(dest any) error { | |||||||
| } | } | ||||||
|  |  | ||||||
| // fieldsByTraversal forked from github.com/jmoiron/sqlx@v1.3.5/sqlx.go | // fieldsByTraversal forked from github.com/jmoiron/sqlx@v1.3.5/sqlx.go | ||||||
| func fieldsByTraversalExtended(q Queryable, v reflect.Value, traversals [][]int, values []interface{}, converter []DBTypeConverter) error { | func fieldsByTraversalExtended(q Queryable, v reflect.Value, traversals [][]int, values []interface{}, converter []ssConverter) error { | ||||||
| 	v = reflect.Indirect(v) | 	v = reflect.Indirect(v) | ||||||
| 	if v.Kind() != reflect.Struct { | 	if v.Kind() != reflect.Struct { | ||||||
| 		return errors.New("argument not a struct") | 		return errors.New("argument not a struct") | ||||||
| @@ -205,14 +217,26 @@ func fieldsByTraversalExtended(q Queryable, v reflect.Value, traversals [][]int, | |||||||
| 				_v := langext.Ptr[any](nil) | 				_v := langext.Ptr[any](nil) | ||||||
| 				values[i] = _v | 				values[i] = _v | ||||||
| 				foundConverter = true | 				foundConverter = true | ||||||
| 				converter[i] = conv | 				converter[i] = ssConverter{Converter: conv, RefCount: 0} | ||||||
| 				break | 				break | ||||||
| 			} | 			} | ||||||
| 		} | 		} | ||||||
|  | 		if !foundConverter { | ||||||
|  | 			// also allow non-pointer converter for pointer-types | ||||||
|  | 			for _, conv := range q.ListConverter() { | ||||||
|  | 				if conv.ModelTypeString() == strings.TrimLeft(typeStr, "*") { | ||||||
|  | 					_v := langext.Ptr[any](nil) | ||||||
|  | 					values[i] = _v | ||||||
|  | 					foundConverter = true | ||||||
|  | 					converter[i] = ssConverter{Converter: conv, RefCount: len(typeStr) - len(strings.TrimLeft(typeStr, "*"))} // kind hacky way to get the amount of ptr before <f>, but it works... | ||||||
|  | 					break | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		if !foundConverter { | 		if !foundConverter { | ||||||
| 			values[i] = reflect.New(reflect.PointerTo(f.Type())).Interface() | 			values[i] = reflect.New(reflect.PointerTo(f.Type())).Interface() | ||||||
| 			converter[i] = nil | 			converter[i] = ssConverter{Converter: nil, RefCount: -1} | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| 	return nil | 	return nil | ||||||
|   | |||||||
| @@ -4,6 +4,7 @@ import ( | |||||||
| 	"context" | 	"context" | ||||||
| 	"database/sql" | 	"database/sql" | ||||||
| 	"github.com/jmoiron/sqlx" | 	"github.com/jmoiron/sqlx" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -48,7 +49,7 @@ func (tx *transaction) Rollback() error { | |||||||
| 	for _, v := range tx.db.lstr { | 	for _, v := range tx.db.lstr { | ||||||
| 		err := v.PreTxRollback(tx.id) | 		err := v.PreTxRollback(tx.id) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return err | 			return exerr.Wrap(err, "failed to call SQL pre-rollback listener").Int("tx.id", int(tx.id)).Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -69,7 +70,7 @@ func (tx *transaction) Commit() error { | |||||||
| 	for _, v := range tx.db.lstr { | 	for _, v := range tx.db.lstr { | ||||||
| 		err := v.PreTxCommit(tx.id) | 		err := v.PreTxCommit(tx.id) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return err | 			return exerr.Wrap(err, "failed to call SQL pre-commit listener").Int("tx.id", int(tx.id)).Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -91,7 +92,7 @@ func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Re | |||||||
| 	for _, v := range tx.db.lstr { | 	for _, v := range tx.db.lstr { | ||||||
| 		err := v.PreExec(ctx, langext.Ptr(tx.id), &sqlstr, &prep) | 		err := v.PreExec(ctx, langext.Ptr(tx.id), &sqlstr, &prep) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, err | 			return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -106,7 +107,7 @@ func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Re | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, exerr.Wrap(err, "Failed to [exec] sql statement").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 	} | 	} | ||||||
| 	return res, nil | 	return res, nil | ||||||
| } | } | ||||||
| @@ -116,7 +117,7 @@ func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx | |||||||
| 	for _, v := range tx.db.lstr { | 	for _, v := range tx.db.lstr { | ||||||
| 		err := v.PreQuery(ctx, langext.Ptr(tx.id), &sqlstr, &prep) | 		err := v.PreQuery(ctx, langext.Ptr(tx.id), &sqlstr, &prep) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, err | 			return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| @@ -131,7 +132,7 @@ func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, exerr.Wrap(err, "Failed to [query] sql statement").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||||
| 	} | 	} | ||||||
| 	return rows, nil | 	return rows, nil | ||||||
| } | } | ||||||
|   | |||||||
| @@ -45,7 +45,7 @@ type Coll[TData any] struct { | |||||||
| 	customDecoder       *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface) | 	customDecoder       *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface) | ||||||
| 	isInterfaceDataType bool                                                     // true if TData is an interface (not a struct) | 	isInterfaceDataType bool                                                     // true if TData is an interface (not a struct) | ||||||
| 	unmarshalHooks      []func(d TData) TData                                    // called for every object after unmarshalling | 	unmarshalHooks      []func(d TData) TData                                    // called for every object after unmarshalling | ||||||
| 	extraModPipeline    mongo.Pipeline                                           // appended to pipelines after filter/limit/skip/sort, used for $lookup, $set, $unset, $project, etc | 	extraModPipeline    []func(ctx context.Context) mongo.Pipeline               // appended to pipelines after filter/limit/skip/sort, used for $lookup, $set, $unset, $project, etc | ||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Collection() *mongo.Collection { | func (c *Coll[TData]) Collection() *mongo.Collection { | ||||||
| @@ -83,7 +83,13 @@ func (c *Coll[TData]) WithUnmarshalHook(fn func(d TData) TData) *Coll[TData] { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (c *Coll[TData]) WithModifyingPipeline(p mongo.Pipeline) *Coll[TData] { | func (c *Coll[TData]) WithModifyingPipeline(p mongo.Pipeline) *Coll[TData] { | ||||||
| 	c.extraModPipeline = append(c.extraModPipeline, p...) | 	c.extraModPipeline = append(c.extraModPipeline, func(ctx context.Context) mongo.Pipeline { return p }) | ||||||
|  |  | ||||||
|  | 	return c | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func (c *Coll[TData]) WithModifyingPipelineFunc(fn func(ctx context.Context) mongo.Pipeline) *Coll[TData] { | ||||||
|  | 	c.extraModPipeline = append(c.extraModPipeline, fn) | ||||||
|  |  | ||||||
| 	return c | 	return c | ||||||
| } | } | ||||||
|   | |||||||
| @@ -10,7 +10,9 @@ import ( | |||||||
|  |  | ||||||
| func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) { | func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) { | ||||||
|  |  | ||||||
| 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -27,7 +29,9 @@ func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, op | |||||||
|  |  | ||||||
| func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) { | func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) { | ||||||
|  |  | ||||||
| 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| @@ -47,7 +51,9 @@ func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeli | |||||||
|  |  | ||||||
| func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) { | func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) { | ||||||
|  |  | ||||||
| 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | 	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
|   | |||||||
| @@ -32,7 +32,9 @@ func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options. | |||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	for _, opt := range opts { | 	for _, opt := range opts { | ||||||
| 		if opt != nil && opt.Projection != nil { | 		if opt != nil && opt.Projection != nil { | ||||||
|   | |||||||
| @@ -71,7 +71,9 @@ func (c *Coll[TData]) findOneInternal(ctx context.Context, filter bson.M, allowN | |||||||
| 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}}) | 		pipeline = append(pipeline, bson.D{{Key: "$match", Value: filter}}) | ||||||
| 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: 1}}) | 		pipeline = append(pipeline, bson.D{{Key: "$limit", Value: 1}}) | ||||||
|  |  | ||||||
| 		pipeline = langext.ArrConcat(pipeline, c.extraModPipeline) | 		for _, ppl := range c.extraModPipeline { | ||||||
|  | 			pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||||
|  | 		} | ||||||
|  |  | ||||||
| 		cursor, err := c.coll.Aggregate(ctx, pipeline) | 		cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
|   | |||||||
| @@ -6,6 +6,7 @@ import ( | |||||||
| 	"go.mongodb.org/mongo-driver/mongo" | 	"go.mongodb.org/mongo-driver/mongo" | ||||||
| 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | ||||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||||
|  | 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) { | func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) { | ||||||
| @@ -50,7 +51,10 @@ func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, | |||||||
| 	} | 	} | ||||||
|  |  | ||||||
| 	pipeline = append(pipeline, paginationPipeline...) | 	pipeline = append(pipeline, paginationPipeline...) | ||||||
| 	pipeline = append(pipeline, c.extraModPipeline...) |  | ||||||
|  | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		pipeline = langext.ArrConcat(pipeline, ppl(ctx)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	cursor, err := c.coll.Aggregate(ctx, pipeline) | 	cursor, err := c.coll.Aggregate(ctx, pipeline) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
|   | |||||||
| @@ -9,7 +9,7 @@ import ( | |||||||
| 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.Filter, page int, limit *int) ([]TData, pag.Pagination, error) { | func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.MongoFilter, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||||
| 	type totalCountResult struct { | 	type totalCountResult struct { | ||||||
| 		Count int `bson:"count"` | 		Count int `bson:"count"` | ||||||
| 	} | 	} | ||||||
| @@ -42,7 +42,12 @@ func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.Filter, page int, | |||||||
| 	pipelineCount := mongo.Pipeline{} | 	pipelineCount := mongo.Pipeline{} | ||||||
| 	pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}}) | 	pipelineCount = append(pipelineCount, bson.D{{Key: "$count", Value: "count"}}) | ||||||
|  |  | ||||||
| 	pipelineList := langext.ArrConcat(pipelineFilter, pipelineSort, pipelinePaginate, c.extraModPipeline, pipelineSort) | 	extrModPipelineResolved := mongo.Pipeline{} | ||||||
|  | 	for _, ppl := range c.extraModPipeline { | ||||||
|  | 		extrModPipelineResolved = langext.ArrConcat(extrModPipelineResolved, ppl(ctx)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	pipelineList := langext.ArrConcat(pipelineFilter, pipelineSort, pipelinePaginate, extrModPipelineResolved, pipelineSort) | ||||||
| 	pipelineTotalCount := langext.ArrConcat(pipelineFilter, pipelineCount) | 	pipelineTotalCount := langext.ArrConcat(pipelineFilter, pipelineCount) | ||||||
|  |  | ||||||
| 	cursorList, err := c.coll.Aggregate(ctx, pipelineList) | 	cursorList, err := c.coll.Aggregate(ctx, pipelineList) | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user