Compare commits

15 Commits
| SHA1 |
|---|
| b9d0348735 |
| b9e9575b9b |
| 295a098eb4 |
| b69a082bb1 |
| a4a8c83d17 |
| e952176bb0 |
| d99adb203b |
| f1f91f4cfa |
| 2afb265ea4 |
| be24f7a190 |
| aae8a706e9 |
| 7d64f18f54 |
| d08b2e565a |
| d29e84894d |
| 617298c366 |

cryptext/pronouncablePassword.go (new file, 263 lines)
							| @@ -0,0 +1,263 @@ | ||||
| package cryptext | ||||
|  | ||||
| import ( | ||||
| 	"crypto/rand" | ||||
| 	"io" | ||||
| 	"math/big" | ||||
| 	mathrand "math/rand" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| const ( | ||||
| 	ppStartChar            = "BCDFGHJKLMNPQRSTVWXZ" | ||||
| 	ppEndChar              = "ABDEFIKMNORSTUXYZ" | ||||
| 	ppVowel                = "AEIOUY" | ||||
| 	ppConsonant            = "BCDFGHJKLMNPQRSTVWXZ" | ||||
| 	ppSegmentLenMin        = 3 | ||||
| 	ppSegmentLenMax        = 7 | ||||
| 	ppMaxRepeatedVowel     = 2 | ||||
| 	ppMaxRepeatedConsonant = 2 | ||||
| ) | ||||
|  | ||||
| var ppContinuation = map[uint8]string{ | ||||
| 	'A': "BCDFGHJKLMNPRSTVWXYZ", | ||||
| 	'B': "ADFIKLMNORSTUY", | ||||
| 	'C': "AEIKOUY", | ||||
| 	'D': "AEILORSUYZ", | ||||
| 	'E': "BCDFGHJKLMNPRSTVWXYZ", | ||||
| 	'F': "ADEGIKLOPRTUY", | ||||
| 	'G': "ABDEFHILMNORSTUY", | ||||
| 	'H': "AEIOUY", | ||||
| 	'I': "BCDFGHJKLMNPRSTVWXZ", | ||||
| 	'J': "AEIOUY", | ||||
| 	'K': "ADEFHILMNORSTUY", | ||||
| 	'L': "ADEFGIJKMNOPSTUVWYZ", | ||||
| 	'M': "ABEFIKOPSTUY", | ||||
| 	'N': "ABEFIKOPSTUY", | ||||
| 	'O': "BCDFGHJKLMNPRSTVWXYZ", | ||||
| 	'P': "AEFIJLORSTUY", | ||||
| 	'Q': "AEIOUY", | ||||
| 	'R': "ADEFGHIJKLMNOPSTUVYZ", | ||||
| 	'S': "ACDEIKLOPTUYZ", | ||||
| 	'T': "AEHIJOPRSUWY", | ||||
| 	'U': "BCDFGHJKLMNPRSTVWXZ", | ||||
| 	'V': "AEIOUY", | ||||
| 	'W': "AEIOUY", | ||||
| 	'X': "AEIOUY", | ||||
| 	'Y': "ABCDFGHKLMNPRSTVXZ", | ||||
| 	'Z': "AEILOTUY", | ||||
| } | ||||
|  | ||||
| var ppLog2Map = map[int]float64{ | ||||
| 	1:  0.00000000, | ||||
| 	2:  1.00000000, | ||||
| 	3:  1.58496250, | ||||
| 	4:  2.00000000, | ||||
| 	5:  2.32192809, | ||||
| 	6:  2.58496250, | ||||
| 	7:  2.80735492, | ||||
| 	8:  3.00000000, | ||||
| 	9:  3.16992500, | ||||
| 	10: 3.32192809, | ||||
| 	11: 3.45943162, | ||||
| 	12: 3.58496250, | ||||
| 	13: 3.70043972, | ||||
| 	14: 3.80735492, | ||||
| 	15: 3.90689060, | ||||
| 	16: 4.00000000, | ||||
| 	17: 4.08746284, | ||||
| 	18: 4.16992500, | ||||
| 	19: 4.24792751, | ||||
| 	20: 4.32192809, | ||||
| 	21: 4.39231742, | ||||
| 	22: 4.45943162, | ||||
| 	23: 4.52356196, | ||||
| 	24: 4.58496250, | ||||
| 	25: 4.64385619, | ||||
| 	26: 4.70043972, | ||||
| 	27: 4.75488750, | ||||
| 	28: 4.80735492, | ||||
| 	29: 4.85798100, | ||||
| 	30: 4.90689060, | ||||
| 	31: 4.95419631, | ||||
| 	32: 5.00000000, | ||||
| } | ||||
|  | ||||
| var ( | ||||
| 	ppVowelMap     = ppMakeSet(ppVowel) | ||||
| 	ppConsonantMap = ppMakeSet(ppConsonant) | ||||
| 	ppEndCharMap   = ppMakeSet(ppEndChar) | ||||
| ) | ||||
|  | ||||
| func ppMakeSet(v string) map[uint8]bool { | ||||
| 	mp := make(map[uint8]bool, len(v)) | ||||
| 	for _, chr := range v { | ||||
| 		mp[uint8(chr)] = true | ||||
| 	} | ||||
| 	return mp | ||||
| } | ||||
|  | ||||
| func ppRandInt(rng io.Reader, max int) int { | ||||
| 	v, err := rand.Int(rng, big.NewInt(int64(max))) | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
| 	return int(v.Int64()) | ||||
| } | ||||
|  | ||||
| func ppRand(rng io.Reader, chars string, entropy *float64) uint8 { | ||||
| 	chr := chars[ppRandInt(rng, len(chars))] | ||||
|  | ||||
| 	*entropy = *entropy + ppLog2Map[len(chars)] | ||||
|  | ||||
| 	return chr | ||||
| } | ||||
|  | ||||
| func ppCharType(chr uint8) (bool, bool) { | ||||
| 	_, ok1 := ppVowelMap[chr] | ||||
| 	_, ok2 := ppConsonantMap[chr] | ||||
|  | ||||
| 	return ok1, ok2 | ||||
| } | ||||
|  | ||||
| func ppCharsetRemove(cs string, set map[uint8]bool, allowEmpty bool) string { | ||||
| 	result := "" | ||||
| 	for _, chr := range cs { | ||||
| 		if _, ok := set[uint8(chr)]; !ok { | ||||
| 			result += string(chr) | ||||
| 		} | ||||
| 	} | ||||
| 	if result == "" && !allowEmpty { | ||||
| 		return cs | ||||
| 	} | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| func ppCharsetFilter(cs string, set map[uint8]bool, allowEmpty bool) string { | ||||
| 	result := "" | ||||
| 	for _, chr := range cs { | ||||
| 		if _, ok := set[uint8(chr)]; ok { | ||||
| 			result += string(chr) | ||||
| 		} | ||||
| 	} | ||||
| 	if result == "" && !allowEmpty { | ||||
| 		return cs | ||||
| 	} | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| func PronouncablePasswordExt(rng io.Reader, pwlen int) (string, float64) { | ||||
|  | ||||
| 	// kinda pseudo markov-chain - with a few extra rules and no weights... | ||||
|  | ||||
| 	if pwlen <= 0 { | ||||
| 		return "", 0 | ||||
| 	} | ||||
|  | ||||
| 	vowelCount := 0 | ||||
| 	consoCount := 0 | ||||
| 	entropy := float64(0) | ||||
|  | ||||
| 	startChar := ppRand(rng, ppStartChar, &entropy) | ||||
|  | ||||
| 	result := string(startChar) | ||||
| 	currentChar := startChar | ||||
|  | ||||
| 	isVowel, isConsonant := ppCharType(currentChar) | ||||
| 	if isVowel { | ||||
| 		vowelCount = 1 | ||||
| 	} | ||||
| 	if isConsonant { | ||||
| 		consoCount = ppMaxRepeatedConsonant | ||||
| 	} | ||||
|  | ||||
| 	segmentLen := 1 | ||||
|  | ||||
| 	segmentLenTarget := ppSegmentLenMin + ppRandInt(rng, ppSegmentLenMax-ppSegmentLenMin) | ||||
|  | ||||
| 	for len(result) < pwlen { | ||||
|  | ||||
| 		charset := ppContinuation[currentChar] | ||||
| 		if vowelCount >= ppMaxRepeatedVowel { | ||||
| 			charset = ppCharsetRemove(charset, ppVowelMap, false) | ||||
| 		} | ||||
| 		if consoCount >= ppMaxRepeatedConsonant { | ||||
| 			charset = ppCharsetRemove(charset, ppConsonantMap, false) | ||||
| 		} | ||||
|  | ||||
| 		lastOfSegment := false | ||||
| 		newSegment := false | ||||
|  | ||||
| 		if len(result)+1 == pwlen { | ||||
| 			// last of result | ||||
| 			charset = ppCharsetFilter(charset, ppEndCharMap, false) | ||||
| 		} else if segmentLen+1 == segmentLenTarget { | ||||
| 			// last of segment | ||||
| 			charsetNew := ppCharsetFilter(charset, ppEndCharMap, true) | ||||
| 			if charsetNew != "" { | ||||
| 				charset = charsetNew | ||||
| 				lastOfSegment = true | ||||
| 			} | ||||
| 		} else if segmentLen >= segmentLenTarget { | ||||
| 			// (perhaps) start of new segment | ||||
| 			if _, ok := ppEndCharMap[currentChar]; ok { | ||||
| 				charset = ppStartChar | ||||
| 				newSegment = true | ||||
| 			} else { | ||||
| 				// continue segment for one more char to (hopefully) find an end-char | ||||
| 				charsetNew := ppCharsetFilter(charset, ppEndCharMap, true) | ||||
| 				if charsetNew != "" { | ||||
| 					charset = charsetNew | ||||
| 					lastOfSegment = true | ||||
| 				} | ||||
| 			} | ||||
| 		} else { | ||||
| 			// normal continuation | ||||
| 		} | ||||
|  | ||||
| 		newChar := ppRand(rng, charset, &entropy) | ||||
| 		if lastOfSegment { | ||||
| 			currentChar = newChar | ||||
| 			segmentLen++ | ||||
| 			result += strings.ToLower(string(newChar)) | ||||
| 		} else if newSegment { | ||||
| 			currentChar = newChar | ||||
| 			segmentLen = 1 | ||||
| 			result += strings.ToUpper(string(newChar)) | ||||
| 			segmentLenTarget = ppSegmentLenMin + ppRandInt(rng, ppSegmentLenMax-ppSegmentLenMin) | ||||
| 			vowelCount = 0 | ||||
| 			consoCount = 0 | ||||
| 		} else { | ||||
| 			currentChar = newChar | ||||
| 			segmentLen++ | ||||
| 			result += strings.ToLower(string(newChar)) | ||||
| 		} | ||||
|  | ||||
| 		isVowel, isConsonant := ppCharType(currentChar) | ||||
| 		if isVowel { | ||||
| 			vowelCount++ | ||||
| 			consoCount = 0 | ||||
| 		} | ||||
| 		if isConsonant { | ||||
| 			vowelCount = 0 | ||||
| 			if newSegment { | ||||
| 				consoCount = ppMaxRepeatedConsonant | ||||
| 			} else { | ||||
| 				consoCount++ | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return result, entropy | ||||
| } | ||||
|  | ||||
| func PronouncablePassword(len int) string { | ||||
| 	v, _ := PronouncablePasswordExt(rand.Reader, len) | ||||
| 	return v | ||||
| } | ||||
|  | ||||
| func PronouncablePasswordSeeded(seed int64, len int) string { | ||||
|  | ||||
| 	v, _ := PronouncablePasswordExt(mathrand.New(mathrand.NewSource(seed)), len) | ||||
| 	return v | ||||
| } | ||||
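
The two exported entry points wrap the generator above: PronouncablePassword draws from crypto/rand, PronouncablePasswordSeeded from a deterministic math/rand source, and PronouncablePasswordExt additionally reports the accumulated entropy of the random choices in bits. A minimal usage sketch, with the import path assumed from the package name:

```go
package main

import (
	"crypto/rand"
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
)

func main() {
	// Convenience wrapper: cryptographically random, 16 characters.
	fmt.Println(cryptext.PronouncablePassword(16))

	// Ext variant: caller-supplied randomness source, plus the entropy (in bits) of the choices made.
	pw, entropy := cryptext.PronouncablePasswordExt(rand.Reader, 16)
	fmt.Printf("%s (%.2f bits)\n", pw, entropy)
}
```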
							
								
								
									
cryptext/pronouncablePassword_test.go (new file, 35 lines)
							| @@ -0,0 +1,35 @@ | ||||
| package cryptext | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"math/rand" | ||||
| 	"testing" | ||||
| ) | ||||
|  | ||||
| func TestPronouncablePasswordExt(t *testing.T) { | ||||
| 	for i := 0; i < 20; i++ { | ||||
| 		pw, entropy := PronouncablePasswordExt(rand.New(rand.NewSource(int64(i))), 16) | ||||
| 		fmt.Printf("[%.2f] => %s\n", entropy, pw) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPronouncablePasswordSeeded(t *testing.T) { | ||||
| 	for i := 0; i < 20; i++ { | ||||
| 		pw := PronouncablePasswordSeeded(int64(i), 8) | ||||
| 		fmt.Printf("%s\n", pw) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPronouncablePassword(t *testing.T) { | ||||
| 	for i := 0; i < 20; i++ { | ||||
| 		pw := PronouncablePassword(i + 1) | ||||
| 		fmt.Printf("%s\n", pw) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestPronouncablePasswordWrongLen(t *testing.T) { | ||||
| 	PronouncablePassword(0) | ||||
| 	PronouncablePassword(-1) | ||||
| 	PronouncablePassword(-2) | ||||
| 	PronouncablePassword(-3) | ||||
| } | ||||
| @@ -68,6 +68,7 @@ func init() { | ||||
| } | ||||
|  | ||||
| type Builder struct { | ||||
| 	wrappedErr      error | ||||
| 	errorData       *ExErr | ||||
| 	containsGinData bool | ||||
| 	noLog           bool | ||||
| @@ -89,9 +90,9 @@ func Wrap(err error, msg string) *Builder { | ||||
| 	if !pkgconfig.RecursiveErrors { | ||||
| 		v := FromError(err) | ||||
| 		v.Message = msg | ||||
| 		return &Builder{errorData: v} | ||||
| 		return &Builder{wrappedErr: err, errorData: v} | ||||
| 	} | ||||
| 	return &Builder{errorData: wrapExErr(FromError(err), msg, CatWrap, 1)} | ||||
| 	return &Builder{wrappedErr: err, errorData: wrapExErr(FromError(err), msg, CatWrap, 1)} | ||||
| } | ||||
|  | ||||
| // ---------------------------------------------------------------------------- | ||||
| @@ -414,6 +415,10 @@ func extractHeader(header map[string][]string) []string { | ||||
| func (b *Builder) Build() error { | ||||
| 	warnOnPkgConfigNotInitialized() | ||||
|  | ||||
| 	if pkgconfig.DisableErrorWrapping && b.wrappedErr != nil { | ||||
| 		return b.wrappedErr | ||||
| 	} | ||||
|  | ||||
| 	if pkgconfig.ZeroLogErrTraces && !b.noLog && (b.errorData.Severity == SevErr || b.errorData.Severity == SevFatal) { | ||||
| 		b.errorData.ShortLog(stackSkipLogger.Error()) | ||||
| 	} else if pkgconfig.ZeroLogAllTraces && !b.noLog { | ||||
|   | ||||
| @@ -48,8 +48,9 @@ var ( | ||||
| 	TypeMongoReflection   = NewType("MONGO_REFLECTION", langext.Ptr(500)) | ||||
| 	TypeMongoInvalidOpt   = NewType("MONGO_INVALIDOPT", langext.Ptr(500)) | ||||
|  | ||||
| 	TypeSQLQuery = NewType("SQL_QUERY", langext.Ptr(500)) | ||||
| 	TypeSQLBuild = NewType("SQL_BUILD", langext.Ptr(500)) | ||||
| 	TypeSQLQuery  = NewType("SQL_QUERY", langext.Ptr(500)) | ||||
| 	TypeSQLBuild  = NewType("SQL_BUILD", langext.Ptr(500)) | ||||
| 	TypeSQLDecode = NewType("SQL_DECODE", langext.Ptr(500)) | ||||
|  | ||||
| 	TypeWrap = NewType("Wrap", nil) | ||||
|  | ||||
|   | ||||
| @@ -13,6 +13,7 @@ type ErrorPackageConfig struct { | ||||
| 	IncludeMetaInGinOutput bool                                             // Log meta fields ( from e.g. `.Str(key, val).Build()` ) to gin in err.Output() | ||||
| 	ExtendGinOutput        func(err *ExErr, json map[string]any)            // (Optionally) extend the gin output with more fields | ||||
| 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) // (Optionally) extend the gin `__data` output with more fields | ||||
| 	DisableErrorWrapping   bool                                             // Disables the exerr.Wrap()...Build() function - will always return the original error | ||||
| } | ||||
|  | ||||
| type ErrorPackageConfigInit struct { | ||||
| @@ -23,6 +24,7 @@ type ErrorPackageConfigInit struct { | ||||
| 	IncludeMetaInGinOutput *bool | ||||
| 	ExtendGinOutput        func(err *ExErr, json map[string]any) | ||||
| 	ExtendGinDataOutput    func(err *ExErr, depth int, json map[string]any) | ||||
| 	DisableErrorWrapping   *bool | ||||
| } | ||||
|  | ||||
| var initialized = false | ||||
| @@ -35,6 +37,7 @@ var pkgconfig = ErrorPackageConfig{ | ||||
| 	IncludeMetaInGinOutput: true, | ||||
| 	ExtendGinOutput:        func(err *ExErr, json map[string]any) {}, | ||||
| 	ExtendGinDataOutput:    func(err *ExErr, depth int, json map[string]any) {}, | ||||
| 	DisableErrorWrapping:   false, | ||||
| } | ||||
|  | ||||
| // Init initializes the exerr packages | ||||
| @@ -63,6 +66,7 @@ func Init(cfg ErrorPackageConfigInit) { | ||||
| 		IncludeMetaInGinOutput: langext.Coalesce(cfg.IncludeMetaInGinOutput, pkgconfig.IncludeMetaInGinOutput), | ||||
| 		ExtendGinOutput:        ego, | ||||
| 		ExtendGinDataOutput:    egdo, | ||||
| 		DisableErrorWrapping:   langext.Coalesce(cfg.DisableErrorWrapping, pkgconfig.DisableErrorWrapping), | ||||
| 	} | ||||
|  | ||||
| 	initialized = true | ||||
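
Together with the wrappedErr field added to Builder further up, the new DisableErrorWrapping option turns exerr.Wrap(...).Build() into a pass-through that returns the original error. A hedged sketch of opting in; import paths are assumed from this diff and all other Init options are left at their defaults:

```go
package main

import (
	"errors"
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
	exerr.Init(exerr.ErrorPackageConfigInit{
		DisableErrorWrapping: langext.Ptr(true),
	})

	origErr := errors.New("connection refused")

	// With wrapping disabled, Build() hands back origErr unchanged instead of an *ExErr chain.
	err := exerr.Wrap(origErr, "failed to query backend").Build()
	fmt.Println(errors.Is(err, origErr)) // true
}
```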
|   | ||||
| @@ -17,7 +17,7 @@ import ( | ||||
|  | ||||
| type GinWrapper struct { | ||||
| 	engine          *gin.Engine | ||||
| 	SuppressGinLogs bool | ||||
| 	suppressGinLogs bool | ||||
|  | ||||
| 	allowCors             bool | ||||
| 	ginDebug              bool | ||||
| @@ -51,7 +51,7 @@ func NewEngine(opt Options) *GinWrapper { | ||||
|  | ||||
| 	wrapper := &GinWrapper{ | ||||
| 		engine:                engine, | ||||
| 		SuppressGinLogs:       false, | ||||
| 		suppressGinLogs:       false, | ||||
| 		allowCors:             langext.Coalesce(opt.AllowCors, false), | ||||
| 		ginDebug:              langext.Coalesce(opt.GinDebug, true), | ||||
| 		bufferBody:            langext.Coalesce(opt.BufferBody, false), | ||||
| @@ -75,7 +75,7 @@ func NewEngine(opt Options) *GinWrapper { | ||||
|  | ||||
| 		ginlogger := gin.Logger() | ||||
| 		engine.Use(func(context *gin.Context) { | ||||
| 			if !wrapper.SuppressGinLogs { | ||||
| 			if !wrapper.suppressGinLogs { | ||||
| 				ginlogger(context) | ||||
| 			} | ||||
| 		}) | ||||
|   | ||||
| @@ -163,16 +163,16 @@ func (pctx PreContext) Start() (*AppContext, *gin.Context, *HTTPResponse) { | ||||
|  | ||||
| 	ictx, cancel := context.WithTimeout(context.Background(), langext.Coalesce(pctx.timeout, pctx.wrapper.requestTimeout)) | ||||
|  | ||||
| 	actx := CreateAppContext(pctx.ginCtx, ictx, cancel) | ||||
|  | ||||
| 	if pctx.persistantData.sessionObj != nil { | ||||
| 		err := pctx.persistantData.sessionObj.Init(pctx.ginCtx, ictx) | ||||
| 		err := pctx.persistantData.sessionObj.Init(pctx.ginCtx, actx) | ||||
| 		if err != nil { | ||||
| 			cancel() | ||||
| 			actx.Cancel() | ||||
| 			return nil, nil, langext.Ptr(Error(exerr.Wrap(err, "Failed to init session").Build())) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	actx := CreateAppContext(pctx.ginCtx, ictx, cancel) | ||||
|  | ||||
| 	return actx, pctx.ginCtx, nil | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -9,6 +9,16 @@ import ( | ||||
| 	"os" | ||||
| ) | ||||
|  | ||||
| type cookieval struct { | ||||
| 	name     string | ||||
| 	value    string | ||||
| 	maxAge   int | ||||
| 	path     string | ||||
| 	domain   string | ||||
| 	secure   bool | ||||
| 	httpOnly bool | ||||
| } | ||||
|  | ||||
| type headerval struct { | ||||
| 	Key string | ||||
| 	Val string | ||||
| @@ -17,6 +27,7 @@ type headerval struct { | ||||
| type HTTPResponse interface { | ||||
| 	Write(g *gin.Context) | ||||
| 	WithHeader(k string, v string) HTTPResponse | ||||
| 	WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse | ||||
| 	IsSuccess() bool | ||||
| } | ||||
|  | ||||
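
The new WithCookie mirrors gin's (*Context).SetCookie parameters (name, value, maxAge in seconds, path, domain, secure, httpOnly) and, like WithHeader, returns the response so calls chain. A hedged sketch, assuming the package is imported as ginext; how resp was constructed is outside this diff:

```go
// Attach a session cookie (and a header) to any HTTPResponse before it is written.
func withSessionCookie(resp ginext.HTTPResponse, token string) ginext.HTTPResponse {
	return resp.
		WithHeader("Cache-Control", "no-store").
		WithCookie("session", token, 3600, "/", "example.com", true, true)
}
```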
| @@ -33,6 +44,7 @@ type jsonHTTPResponse struct { | ||||
| 	statusCode int | ||||
| 	data       any | ||||
| 	headers    []headerval | ||||
| 	cookies    []cookieval | ||||
| } | ||||
|  | ||||
| func (j jsonHTTPResponse) jsonRenderer(g *gin.Context) json.GoJsonRender { | ||||
| @@ -47,6 +59,9 @@ func (j jsonHTTPResponse) Write(g *gin.Context) { | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	g.Render(j.statusCode, j.jsonRenderer(g)) | ||||
| } | ||||
|  | ||||
| @@ -55,6 +70,11 @@ func (j jsonHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j jsonHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j jsonHTTPResponse) IsSuccess() bool { | ||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||
| } | ||||
| @@ -82,12 +102,16 @@ func (j jsonHTTPResponse) Headers() []string { | ||||
| type emptyHTTPResponse struct { | ||||
| 	statusCode int | ||||
| 	headers    []headerval | ||||
| 	cookies    []cookieval | ||||
| } | ||||
|  | ||||
| func (j emptyHTTPResponse) Write(g *gin.Context) { | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	g.Status(j.statusCode) | ||||
| } | ||||
|  | ||||
| @@ -96,6 +120,11 @@ func (j emptyHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j emptyHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j emptyHTTPResponse) IsSuccess() bool { | ||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||
| } | ||||
| @@ -120,12 +149,16 @@ type textHTTPResponse struct { | ||||
| 	statusCode int | ||||
| 	data       string | ||||
| 	headers    []headerval | ||||
| 	cookies    []cookieval | ||||
| } | ||||
|  | ||||
| func (j textHTTPResponse) Write(g *gin.Context) { | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	g.String(j.statusCode, "%s", j.data) | ||||
| } | ||||
|  | ||||
| @@ -134,6 +167,11 @@ func (j textHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j textHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j textHTTPResponse) IsSuccess() bool { | ||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||
| } | ||||
| @@ -159,12 +197,16 @@ type dataHTTPResponse struct { | ||||
| 	data        []byte | ||||
| 	contentType string | ||||
| 	headers     []headerval | ||||
| 	cookies     []cookieval | ||||
| } | ||||
|  | ||||
| func (j dataHTTPResponse) Write(g *gin.Context) { | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	g.Data(j.statusCode, j.contentType, j.data) | ||||
| } | ||||
|  | ||||
| @@ -173,6 +215,11 @@ func (j dataHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j dataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j dataHTTPResponse) IsSuccess() bool { | ||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||
| } | ||||
| @@ -198,6 +245,7 @@ type fileHTTPResponse struct { | ||||
| 	filepath string | ||||
| 	filename *string | ||||
| 	headers  []headerval | ||||
| 	cookies  []cookieval | ||||
| } | ||||
|  | ||||
| func (j fileHTTPResponse) Write(g *gin.Context) { | ||||
| @@ -209,6 +257,9 @@ func (j fileHTTPResponse) Write(g *gin.Context) { | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	g.File(j.filepath) | ||||
| } | ||||
|  | ||||
| @@ -217,6 +268,11 @@ func (j fileHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j fileHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j fileHTTPResponse) IsSuccess() bool { | ||||
| 	return true | ||||
| } | ||||
| @@ -247,17 +303,20 @@ type downloadDataHTTPResponse struct { | ||||
| 	data       []byte | ||||
| 	filename   *string | ||||
| 	headers    []headerval | ||||
| 	cookies    []cookieval | ||||
| } | ||||
|  | ||||
| func (j downloadDataHTTPResponse) Write(g *gin.Context) { | ||||
| 	g.Header("Content-Type", j.mimetype) // if we don't set it here gin does weird file-sniffing later... | ||||
| 	if j.filename != nil { | ||||
| 		g.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", *j.filename)) | ||||
|  | ||||
| 	} | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	g.Data(j.statusCode, j.mimetype, j.data) | ||||
| } | ||||
|  | ||||
| @@ -266,6 +325,11 @@ func (j downloadDataHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j downloadDataHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j downloadDataHTTPResponse) IsSuccess() bool { | ||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||
| } | ||||
| @@ -290,9 +354,16 @@ type redirectHTTPResponse struct { | ||||
| 	statusCode int | ||||
| 	url        string | ||||
| 	headers    []headerval | ||||
| 	cookies    []cookieval | ||||
| } | ||||
|  | ||||
| func (j redirectHTTPResponse) Write(g *gin.Context) { | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	g.Redirect(j.statusCode, j.url) | ||||
| } | ||||
|  | ||||
| @@ -301,6 +372,11 @@ func (j redirectHTTPResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j redirectHTTPResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j redirectHTTPResponse) IsSuccess() bool { | ||||
| 	return j.statusCode >= 200 && j.statusCode <= 399 | ||||
| } | ||||
| @@ -324,9 +400,16 @@ func (j redirectHTTPResponse) Headers() []string { | ||||
| type jsonAPIErrResponse struct { | ||||
| 	err     *exerr.ExErr | ||||
| 	headers []headerval | ||||
| 	cookies []cookieval | ||||
| } | ||||
|  | ||||
| func (j jsonAPIErrResponse) Write(g *gin.Context) { | ||||
| 	for _, v := range j.headers { | ||||
| 		g.Header(v.Key, v.Val) | ||||
| 	} | ||||
| 	for _, v := range j.cookies { | ||||
| 		g.SetCookie(v.name, v.value, v.maxAge, v.path, v.domain, v.secure, v.httpOnly) | ||||
| 	} | ||||
| 	j.err.Output(g) | ||||
|  | ||||
| 	j.err.CallListener(exerr.MethodOutput) | ||||
| @@ -337,6 +420,11 @@ func (j jsonAPIErrResponse) WithHeader(k string, v string) HTTPResponse { | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j jsonAPIErrResponse) WithCookie(name string, value string, maxAge int, path string, domain string, secure bool, httpOnly bool) HTTPResponse { | ||||
| 	j.cookies = append(j.cookies, cookieval{name, value, maxAge, path, domain, secure, httpOnly}) | ||||
| 	return j | ||||
| } | ||||
|  | ||||
| func (j jsonAPIErrResponse) IsSuccess() bool { | ||||
| 	return false | ||||
| } | ||||
|   | ||||
| @@ -6,6 +6,6 @@ import ( | ||||
| ) | ||||
|  | ||||
| type SessionObject interface { | ||||
| 	Init(g *gin.Context, ctx context.Context) error | ||||
| 	Init(g *gin.Context, ctx *AppContext) error | ||||
| 	Finish(ctx context.Context, resp HTTPResponse) error | ||||
| } | ||||
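
Since Init now receives the *AppContext that Start() creates (see the ginWrapper change above), existing SessionObject implementations need a one-line signature update. A hedged sketch of an adapted implementation; the type, the lookup logic, and the ginext import path are illustrative assumptions:

```go
package mysession

import (
	"context"

	"github.com/gin-gonic/gin"
	"gogs.mikescher.com/BlackForestBytes/goext/ginext"
)

type apiSession struct {
	Token string
}

// Init matches the new SessionObject signature and gets the request-scoped *AppContext.
func (s *apiSession) Init(g *gin.Context, ctx *ginext.AppContext) error {
	// Illustrative: remember the bearer token; a real implementation would
	// resolve and validate it against a session store here.
	s.Token = g.GetHeader("Authorization")
	return nil
}

func (s *apiSession) Finish(ctx context.Context, resp ginext.HTTPResponse) error {
	return nil
}
```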
|   | ||||
							
								
								
									
go.mod (2 changed lines)
							| @@ -23,7 +23,7 @@ require ( | ||||
| 	github.com/gin-contrib/sse v0.1.0 // indirect | ||||
| 	github.com/go-playground/locales v0.14.1 // indirect | ||||
| 	github.com/go-playground/universal-translator v0.18.1 // indirect | ||||
| 	github.com/go-playground/validator/v10 v10.16.0 // indirect | ||||
| 	github.com/go-playground/validator/v10 v10.17.0 // indirect | ||||
| 	github.com/goccy/go-json v0.10.2 // indirect | ||||
| 	github.com/golang/snappy v0.0.4 // indirect | ||||
| 	github.com/google/uuid v1.5.0 // indirect | ||||
|   | ||||
							
								
								
									
go.sum (2 changed lines)
							| @@ -31,6 +31,8 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn | ||||
| github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= | ||||
| github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE= | ||||
| github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||
| github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74= | ||||
| github.com/go-playground/validator/v10 v10.17.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= | ||||
| github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= | ||||
| github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= | ||||
| github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| package goext | ||||
|  | ||||
| const GoextVersion = "0.0.364" | ||||
| const GoextVersion = "0.0.379" | ||||
|  | ||||
| const GoextVersionTimestamp = "2024-01-09T18:17:55+0100" | ||||
| const GoextVersionTimestamp = "2024-01-19T17:30:20+0100" | ||||
|   | ||||
							
								
								
									
langext/must.go (new file, 21 lines)
							| @@ -0,0 +1,21 @@ | ||||
| package langext | ||||
|  | ||||
| // Must returns a value and panics on error | ||||
| // | ||||
| // Usage: Must(methodWithError(...)) | ||||
| func Must[T any](v T, err error) T { | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
| 	return v | ||||
| } | ||||
|  | ||||
| // MustBool returns a value and panics if ok is false | ||||
| // | ||||
| // Usage: MustBool(methodWithOkayReturn(...)) | ||||
| func MustBool[T any](v T, ok bool) T { | ||||
| 	if !ok { | ||||
| 		panic("not ok") | ||||
| 	} | ||||
| 	return v | ||||
| } | ||||
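
A short usage sketch of the two helpers; the import path is assumed from the file location and lookupPort is an illustrative (value, ok) style function:

```go
package main

import (
	"fmt"
	"os"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func lookupPort(scheme string) (int, bool) {
	p, ok := map[string]int{"http": 80, "https": 443}[scheme]
	return p, ok
}

func main() {
	// Must panics if the error is non-nil and otherwise unwraps the value.
	wd := langext.Must(os.Getwd())

	// MustBool panics if the ok flag is false.
	port := langext.MustBool(lookupPort("https"))

	fmt.Println(wd, port)
}
```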
| @@ -5,7 +5,7 @@ import ( | ||||
| 	"go.mongodb.org/mongo-driver/mongo" | ||||
| ) | ||||
|  | ||||
| type Filter interface { | ||||
| type MongoFilter interface { | ||||
| 	FilterQuery() mongo.Pipeline | ||||
| 	Sort() bson.D | ||||
| } | ||||
| @@ -23,6 +23,6 @@ func (d dynamicFilter) Sort() bson.D { | ||||
| 	return d.sort | ||||
| } | ||||
|  | ||||
| func CreateFilter(pipeline mongo.Pipeline, sort bson.D) Filter { | ||||
| func CreateFilter(pipeline mongo.Pipeline, sort bson.D) MongoFilter { | ||||
| 	return dynamicFilter{pipeline: pipeline, sort: sort} | ||||
| } | ||||
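
Only the interface name changes here; CreateFilter still builds an ad-hoc filter from an aggregation pipeline and a sort document. A hedged sketch, assuming this file belongs to the pagination package that the Coll[TData].Paginate code below imports as pag:

```go
package filters

import (
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination"
)

// activeNewestFirst matches active documents and sorts them newest first;
// the result satisfies pag.MongoFilter and can be handed to Coll[TData].Paginate.
func activeNewestFirst() pag.MongoFilter {
	return pag.CreateFilter(
		mongo.Pipeline{
			bson.D{{Key: "$match", Value: bson.D{{Key: "active", Value: true}}}},
		},
		bson.D{{Key: "created_at", Value: -1}},
	)
}
```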
|   | ||||
							
								
								
									
reflectext/mapAccess.go (new file, 98 lines)
							| @@ -0,0 +1,98 @@ | ||||
| package reflectext | ||||
|  | ||||
| import ( | ||||
| 	"reflect" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| // GetMapPath returns the value deep inside a hierarchically nested map structure | ||||
| // eg: | ||||
| // x := langext.H{"K1": langext.H{"K2": 665}} | ||||
| // GetMapPath[int](x, "K1.K2") == 665 | ||||
| func GetMapPath[TData any](mapval any, path string) (TData, bool) { | ||||
| 	var ok bool | ||||
|  | ||||
| 	split := strings.Split(path, ".") | ||||
|  | ||||
| 	for i, key := range split { | ||||
|  | ||||
| 		if i < len(split)-1 { | ||||
| 			mapval, ok = GetMapField[any](mapval, key) | ||||
| 			if !ok { | ||||
| 				return *new(TData), false | ||||
| 			} | ||||
| 		} else { | ||||
| 			return GetMapField[TData](mapval, key) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return *new(TData), false | ||||
| } | ||||
|  | ||||
| // GetMapField gets the value of a map, without knowing the actual types (mapval is any) | ||||
| // eg: | ||||
| // x := langext.H{"K1": 665} | ||||
| // GetMapField[int](x, "K1") == 665 | ||||
| // | ||||
| // works with aliased types and automatically dereferences pointers | ||||
| func GetMapField[TData any, TKey comparable](mapval any, key TKey) (TData, bool) { | ||||
|  | ||||
| 	rval := reflect.ValueOf(mapval) | ||||
|  | ||||
| 	for rval.Kind() == reflect.Ptr && !rval.IsNil() { | ||||
| 		rval = rval.Elem() | ||||
| 	} | ||||
|  | ||||
| 	if rval.Kind() != reflect.Map { | ||||
| 		return *new(TData), false // mapval is not a map | ||||
| 	} | ||||
|  | ||||
| 	kval := reflect.ValueOf(key) | ||||
|  | ||||
| 	if !kval.Type().AssignableTo(rval.Type().Key()) { | ||||
| 		return *new(TData), false // key cannot index mapval | ||||
| 	} | ||||
|  | ||||
| 	eval := rval.MapIndex(kval) | ||||
| 	if !eval.IsValid() { | ||||
| 		return *new(TData), false // key does not exist in mapval | ||||
| 	} | ||||
|  | ||||
| 	destType := reflect.TypeOf(new(TData)).Elem() | ||||
|  | ||||
| 	if eval.Type() == destType { | ||||
| 		return eval.Interface().(TData), true | ||||
| 	} | ||||
|  | ||||
| 	if eval.CanConvert(destType) && !preventConvert(eval.Type(), destType) { | ||||
| 		return eval.Convert(destType).Interface().(TData), true | ||||
| 	} | ||||
|  | ||||
| 	if (eval.Kind() == reflect.Ptr || eval.Kind() == reflect.Interface) && eval.IsNil() && destType.Kind() == reflect.Ptr { | ||||
| 		return *new(TData), false // special case: mapval[key] is nil | ||||
| 	} | ||||
|  | ||||
| 	for (eval.Kind() == reflect.Ptr || eval.Kind() == reflect.Interface) && !eval.IsNil() { | ||||
| 		eval = eval.Elem() | ||||
|  | ||||
| 		if eval.Type() == destType { | ||||
| 			return eval.Interface().(TData), true | ||||
| 		} | ||||
|  | ||||
| 		if eval.CanConvert(destType) && !preventConvert(eval.Type(), destType) { | ||||
| 			return eval.Convert(destType).Interface().(TData), true | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return *new(TData), false // mapval[key] is not of type TData | ||||
| } | ||||
|  | ||||
| func preventConvert(t1 reflect.Type, t2 reflect.Type) bool { | ||||
| 	if t1.Kind() == reflect.String && t2.Kind() != reflect.String { | ||||
| 		return true | ||||
| 	} | ||||
| 	if t2.Kind() == reflect.String && t1.Kind() != reflect.String { | ||||
| 		return true | ||||
| 	} | ||||
| 	return false | ||||
| } | ||||
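
Beyond what the tests below exercise, the helpers also dereference pointer values on the way down and convert compatible numeric types. A hedged sketch; the import path is assumed from the package name:

```go
package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/reflectext"
)

func main() {
	inner := map[string]any{"K2": 665}
	outer := map[string]any{"K1": &inner} // the pointer is dereferenced during traversal

	v, ok := reflectext.GetMapPath[int64](outer, "K1.K2")
	fmt.Println(v, ok) // 665 true - the stored int is converted to the requested int64
}
```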
							
								
								
									
reflectext/mapAccess_test.go (new file, 55 lines)
							| @@ -0,0 +1,55 @@ | ||||
| package reflectext | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/tst" | ||||
| 	"testing" | ||||
| ) | ||||
|  | ||||
| func TestGetMapPath(t *testing.T) { | ||||
| 	type PseudoInt = int64 | ||||
|  | ||||
| 	mymap2 := map[string]map[string]any{"Test": {"Second": 3}} | ||||
|  | ||||
| 	var maany2 any = mymap2 | ||||
|  | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[int](maany2, "Test.Second")), "3 true") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[int](maany2, "Test2.Second")), "0 false") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[int](maany2, "Test.Second2")), "0 false") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[string](maany2, "Test.Second")), "false") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[string](maany2, "Test2.Second")), "false") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[string](maany2, "Test.Second2")), "false") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[PseudoInt](maany2, "Test.Second")), "3 true") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[PseudoInt](maany2, "Test2.Second")), "0 false") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapPath[PseudoInt](maany2, "Test.Second2")), "0 false") | ||||
| } | ||||
|  | ||||
| func TestGetMapField(t *testing.T) { | ||||
| 	type PseudoInt = int64 | ||||
|  | ||||
| 	mymap1 := map[string]any{"Test": 12} | ||||
| 	mymap2 := map[string]int{"Test": 12} | ||||
|  | ||||
| 	var maany1 any = mymap1 | ||||
| 	var maany2 any = mymap2 | ||||
|  | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany1, "Test")), "12 true") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany1, "Test2")), "0 false") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany1, "Test")), "false") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany1, "Test2")), "false") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany1, "Test")), "12 true") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany1, "Test2")), "0 false") | ||||
|  | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany2, "Test")), "12 true") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapField[int](maany2, "Test2")), "0 false") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany2, "Test")), "false") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapField[string](maany2, "Test2")), "false") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test")), "12 true") | ||||
| 	tst.AssertEqual(t, fmt.Sprint(GetMapField[PseudoInt](maany2, "Test2")), "0 false") | ||||
| } | ||||
|  | ||||
| func main2() { | ||||
| } | ||||
|  | ||||
| func main() { | ||||
| } | ||||
| @@ -53,7 +53,7 @@ func BuildUpdateStatement(q Queryable, tableName string, obj any, idColumn strin | ||||
| 				return "", nil, err | ||||
| 			} | ||||
|  | ||||
| 			setClauses = append(setClauses, fmt.Sprintf("(%s = :%s)", columnName, params.Add(val))) | ||||
| 			setClauses = append(setClauses, fmt.Sprintf("%s = :%s", columnName, params.Add(val))) | ||||
|  | ||||
| 		} | ||||
| 	} | ||||
| @@ -69,3 +69,52 @@ func BuildUpdateStatement(q Queryable, tableName string, obj any, idColumn strin | ||||
| 	//goland:noinspection SqlNoDataSourceInspection | ||||
| 	return fmt.Sprintf("UPDATE %s SET %s WHERE %s", tableName, strings.Join(setClauses, ", "), matchClause), params, nil | ||||
| } | ||||
|  | ||||
| func BuildInsertStatement(q Queryable, tableName string, obj any) (string, PP, error) { | ||||
| 	rval := reflect.ValueOf(obj) | ||||
| 	rtyp := rval.Type() | ||||
|  | ||||
| 	params := PP{} | ||||
|  | ||||
| 	fields := make([]string, 0) | ||||
| 	values := make([]string, 0) | ||||
|  | ||||
| 	for i := 0; i < rtyp.NumField(); i++ { | ||||
|  | ||||
| 		rsfield := rtyp.Field(i) | ||||
| 		rvfield := rval.Field(i) | ||||
|  | ||||
| 		if !rsfield.IsExported() { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		columnName := rsfield.Tag.Get("db") | ||||
| 		if columnName == "" || columnName == "-" { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		if rsfield.Type.Kind() == reflect.Ptr && rvfield.IsNil() { | ||||
|  | ||||
| 			fields = append(fields, columnName) | ||||
| 			values = append(values, "NULL") | ||||
|  | ||||
| 		} else { | ||||
|  | ||||
| 			val, err := convertValueToDB(q, rvfield.Interface()) | ||||
| 			if err != nil { | ||||
| 				return "", nil, err | ||||
| 			} | ||||
|  | ||||
| 			fields = append(fields, columnName) | ||||
| 			values = append(values, ":"+params.Add(val)) | ||||
|  | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if len(fields) == 0 { | ||||
| 		return "", nil, exerr.New(exerr.TypeSQLBuild, "no fields found in object").Build() | ||||
| 	} | ||||
|  | ||||
| 	//goland:noinspection SqlNoDataSourceInspection | ||||
| 	return fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", tableName, strings.Join(fields, ", "), strings.Join(values, ", ")), params, nil | ||||
| } | ||||
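
BuildInsertStatement mirrors the update builder above: it walks the exported, db-tagged fields of a struct, emits a literal NULL for nil pointers, and registers every other value as a named parameter via PP. A hedged sketch of a caller; the row type, table name, and import path are illustrative, and the placeholder names are whatever PP.Add generates:

```go
package repo

import (
	"gogs.mikescher.com/BlackForestBytes/goext/sq"
)

type userRow struct {
	ID   string `db:"id"`
	Name string `db:"name"`
	Age  *int   `db:"age"` // nil is emitted as a literal NULL in the VALUES list
}

func buildUserInsert(q sq.Queryable, u userRow) (string, sq.PP, error) {
	// Produces roughly: INSERT INTO users (id, name, age) VALUES (:<p1>, :<p2>, NULL)
	return sq.BuildInsertStatement(q, "users", u)
}
```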
|   | ||||
| @@ -18,9 +18,9 @@ type DBTypeConverter interface { | ||||
| 	DBToModel(v any) (any, error) | ||||
| } | ||||
|  | ||||
| var ConverterBoolToBit = NewDBTypeConverter[bool, int](func(v bool) (int, error) { | ||||
| 	return langext.Conditional(v, 1, 0), nil | ||||
| }, func(v int) (bool, error) { | ||||
| var ConverterBoolToBit = NewDBTypeConverter[bool, int64](func(v bool) (int64, error) { | ||||
| 	return langext.Conditional(v, int64(1), int64(0)), nil | ||||
| }, func(v int64) (bool, error) { | ||||
| 	if v == 0 { | ||||
| 		return false, nil | ||||
| 	} | ||||
|   | ||||
| @@ -4,6 +4,7 @@ import ( | ||||
| 	"context" | ||||
| 	"database/sql" | ||||
| 	"github.com/jmoiron/sqlx" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"sync" | ||||
| ) | ||||
| @@ -45,7 +46,7 @@ func (db *database) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Resul | ||||
| 	for _, v := range db.lstr { | ||||
| 		err := v.PreExec(ctx, nil, &sqlstr, &prep) | ||||
| 		if err != nil { | ||||
| 			return nil, err | ||||
| 			return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -56,7 +57,7 @@ func (db *database) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Resul | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 		return nil, exerr.Wrap(err, "Failed to [exec] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 	} | ||||
| 	return res, nil | ||||
| } | ||||
| @@ -66,7 +67,7 @@ func (db *database) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Ro | ||||
| 	for _, v := range db.lstr { | ||||
| 		err := v.PreQuery(ctx, nil, &sqlstr, &prep) | ||||
| 		if err != nil { | ||||
| 			return nil, err | ||||
| 			return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -77,7 +78,7 @@ func (db *database) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx.Ro | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 		return nil, exerr.Wrap(err, "Failed to [query] sql statement").Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 	} | ||||
| 	return rows, nil | ||||
| } | ||||
| @@ -97,7 +98,7 @@ func (db *database) Ping(ctx context.Context) error { | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 		return exerr.Wrap(err, "Failed to [ping] sql database").Build() | ||||
| 	} | ||||
| 	return nil | ||||
| } | ||||
| @@ -117,7 +118,7 @@ func (db *database) BeginTransaction(ctx context.Context, iso sql.IsolationLevel | ||||
|  | ||||
| 	xtx, err := db.db.BeginTxx(ctx, &sql.TxOptions{Isolation: iso}) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 		return nil, exerr.Wrap(err, "Failed to start sql transaction").Build() | ||||
| 	} | ||||
|  | ||||
| 	for _, v := range db.lstr { | ||||
|   | ||||
							
								
								
									
sq/paginate.go (new file, 126 lines)
							| @@ -0,0 +1,126 @@ | ||||
| package sq | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | ||||
| ) | ||||
|  | ||||
| type PaginateFilter interface { | ||||
| 	SQL(params PP) (filterClause string, joinClause string, joinTables []string) | ||||
| 	Sort() []FilterSort | ||||
| } | ||||
|  | ||||
| type FilterSort struct { | ||||
| 	Field     string | ||||
| 	Direction ct.SortDirection | ||||
| } | ||||
|  | ||||
| func Paginate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||
| 	prepParams := PP{} | ||||
|  | ||||
| 	sortOrder := filter.Sort() | ||||
| 	sortCond := "" | ||||
| 	if len(sortOrder) > 0 { | ||||
| 		sortCond = "ORDER BY " | ||||
| 		for i, v := range sortOrder { | ||||
| 			if i > 0 { | ||||
| 				sortCond += ", " | ||||
| 			} | ||||
| 			sortCond += v.Field + " " + string(v.Direction) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	pageCond := "" | ||||
| 	if limit != nil { | ||||
| 		pageCond += fmt.Sprintf("LIMIT :%s OFFSET :%s", prepParams.Add(*limit+1), prepParams.Add(*limit*(page-1))) | ||||
| 	} | ||||
|  | ||||
| 	filterCond, joinCond, joinTables := filter.SQL(prepParams) | ||||
|  | ||||
| 	selectCond := table + ".*" | ||||
| 	for _, v := range joinTables { | ||||
| 		selectCond += ", " + v + ".*" | ||||
| 	} | ||||
|  | ||||
| 	sqlQueryData := "SELECT " + selectCond + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " ) " + sortCond + " " + pageCond | ||||
| 	sqlQueryCount := "SELECT " + "COUNT(*)" + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " ) " | ||||
|  | ||||
| 	rows, err := q.Query(ctx, sqlQueryData, prepParams) | ||||
| 	if err != nil { | ||||
| 		return nil, pag.Pagination{}, exerr.Wrap(err, "failed to list paginated entries from DB").Str("table", table).Any("filter", filter).Int("page", page).Any("limit", limit).Build() | ||||
| 	} | ||||
|  | ||||
| 	entities, err := ScanAll[TData](ctx, q, rows, scanMode, scanSec, true) | ||||
| 	if err != nil { | ||||
| 		return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode paginated entries from DB").Str("table", table).Int("page", page).Any("limit", limit).Str("scanMode", string(scanMode)).Str("scanSec", string(scanSec)).Build() | ||||
| 	} | ||||
|  | ||||
| 	if page == 1 && (limit == nil || len(entities) <= *limit) { | ||||
| 		return entities, pag.Pagination{ | ||||
| 			Page:             1, | ||||
| 			Limit:            langext.Coalesce(limit, len(entities)), | ||||
| 			TotalPages:       1, | ||||
| 			TotalItems:       len(entities), | ||||
| 			CurrentPageCount: len(entities), | ||||
| 		}, nil | ||||
| 	} else { | ||||
|  | ||||
| 		countRows, err := q.Query(ctx, sqlQueryCount, prepParams) | ||||
| 		if err != nil { | ||||
| 			return nil, pag.Pagination{}, exerr.Wrap(err, "failed to query total-count of paginated entries from DB").Str("table", table).Build() | ||||
| 		} | ||||
|  | ||||
| 		if !countRows.Next() { | ||||
| 			return nil, pag.Pagination{}, exerr.New(exerr.TypeSQLDecode, "SQL COUNT(*) query returned no rows").Str("table", table).Any("filter", filter).Build() | ||||
| 		} | ||||
|  | ||||
| 		var countRes int | ||||
| 		err = countRows.Scan(&countRes) | ||||
| 		if err != nil { | ||||
| 			return nil, pag.Pagination{}, exerr.Wrap(err, "failed to decode total-count of paginated entries from DB").Str("table", table).Build() | ||||
| 		} | ||||
|  | ||||
| 		if limit != nil && len(entities) > *limit { | ||||
| 			entities = entities[:*limit] | ||||
| 		} | ||||
|  | ||||
| 		paginationObj := pag.Pagination{ | ||||
| 			Page:             page, | ||||
| 			Limit:            langext.Coalesce(limit, countRes), | ||||
| 			TotalPages:       pag.CalcPaginationTotalPages(countRes, langext.Coalesce(limit, countRes)), | ||||
| 			TotalItems:       countRes, | ||||
| 			CurrentPageCount: len(entities), | ||||
| 		} | ||||
|  | ||||
| 		return entities, paginationObj, nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func Count(ctx context.Context, q Queryable, table string, filter PaginateFilter) (int, error) { | ||||
| 	prepParams := PP{} | ||||
|  | ||||
| 	filterCond, joinCond, _ := filter.SQL(prepParams) | ||||
|  | ||||
| 	sqlQueryCount := "SELECT " + "COUNT(*)" + " FROM " + table + " " + joinCond + " WHERE ( " + filterCond + " )" | ||||
|  | ||||
| 	countRows, err := q.Query(ctx, sqlQueryCount, prepParams) | ||||
| 	if err != nil { | ||||
| 		return 0, exerr.Wrap(err, "failed to query count of entries from DB").Str("table", table).Build() | ||||
| 	} | ||||
|  | ||||
| 	if !countRows.Next() { | ||||
| 		return 0, exerr.New(exerr.TypeSQLDecode, "SQL COUNT(*) query returned no rows").Str("table", table).Any("filter", filter).Build() | ||||
| 	} | ||||
|  | ||||
| 	var countRes int | ||||
| 	err = countRows.Scan(&countRes) | ||||
| 	if err != nil { | ||||
| 		return 0, exerr.Wrap(err, "failed to decode count of entries from DB").Str("table", table).Build() | ||||
| 	} | ||||
|  | ||||
| 	return countRes, nil | ||||
| } | ||||
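
Paginate and Count both drive their SQL off a PaginateFilter implementation. A hedged sketch of a minimal filter and its use; struct, table, and column names are illustrative, the import paths are assumed, and the scan-mode constants are the ones used elsewhere in this diff:

```go
package repo

import (
	"context"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination"
	"gogs.mikescher.com/BlackForestBytes/goext/sq"
)

type User struct {
	ID     string `db:"id"`
	Name   string `db:"name"`
	Active bool   `db:"active"`
}

// activeUsersFilter keeps only active users; no joins, no explicit sort.
type activeUsersFilter struct{}

func (f activeUsersFilter) SQL(params sq.PP) (filterClause string, joinClause string, joinTables []string) {
	return "users.active = :" + params.Add(1), "", nil
}

func (f activeUsersFilter) Sort() []sq.FilterSort {
	return nil // e.g. []sq.FilterSort{{Field: "users.created_at", Direction: "DESC"}} if ordering is wanted
}

func listActiveUsers(ctx context.Context, q sq.Queryable) ([]User, pag.Pagination, error) {
	// First page, up to 50 rows; Paginate runs the data query and, when needed, a COUNT(*) query.
	return sq.Paginate[User](ctx, q, "users", activeUsersFilter{}, sq.SModeExtended, sq.Safe, 1, langext.Ptr(50))
}
```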
| @@ -93,3 +93,62 @@ func TestTypeConverter2(t *testing.T) { | ||||
| 	tst.AssertEqual(t, "002", r.ID) | ||||
| 	tst.AssertEqual(t, t0.UnixNano(), r.Timestamp.UnixNano()) | ||||
| } | ||||
|  | ||||
| func TestTypeConverter3(t *testing.T) { | ||||
|  | ||||
| 	if !langext.InArray("sqlite3", sql.Drivers()) { | ||||
| 		sqlite.RegisterAsSQLITE3() | ||||
| 	} | ||||
|  | ||||
| 	type RequestData struct { | ||||
| 		ID        string                 `db:"id"` | ||||
| 		Timestamp *rfctime.UnixMilliTime `db:"timestamp"` | ||||
| 	} | ||||
|  | ||||
| 	ctx := context.Background() | ||||
|  | ||||
| 	dbdir := t.TempDir() | ||||
| 	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3") | ||||
|  | ||||
| 	tst.AssertNoErr(t, os.MkdirAll(dbdir, os.ModePerm)) | ||||
|  | ||||
| 	url := fmt.Sprintf("file:%s?_pragma=journal_mode(%s)&_pragma=timeout(%d)&_pragma=foreign_keys(%s)&_pragma=busy_timeout(%d)", dbfile1, "DELETE", 1000, "true", 1000) | ||||
|  | ||||
| 	xdb := tst.Must(sqlx.Open("sqlite", url))(t) | ||||
|  | ||||
| 	db := NewDB(xdb) | ||||
| 	db.RegisterDefaultConverter() | ||||
|  | ||||
| 	_, err := db.Exec(ctx, "CREATE TABLE `requests` ( id TEXT NOT NULL, timestamp INTEGER NULL, PRIMARY KEY (id) ) STRICT", PP{}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	t0 := rfctime.NewUnixMilli(time.Date(2012, 03, 01, 16, 0, 0, 0, time.UTC)) | ||||
|  | ||||
| 	_, err = InsertSingle(ctx, db, "requests", RequestData{ | ||||
| 		ID:        "001", | ||||
| 		Timestamp: &t0, | ||||
| 	}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	_, err = InsertSingle(ctx, db, "requests", RequestData{ | ||||
| 		ID:        "002", | ||||
| 		Timestamp: nil, | ||||
| 	}) | ||||
| 	tst.AssertNoErr(t, err) | ||||
|  | ||||
| 	{ | ||||
| 		r1, err := QuerySingle[RequestData](ctx, db, "SELECT * FROM requests WHERE id = '001'", PP{}, SModeExtended, Safe) | ||||
| 		tst.AssertNoErr(t, err) | ||||
| 		fmt.Printf("%+v\n", r1) | ||||
| 		tst.AssertEqual(t, "001", r1.ID) | ||||
| 		tst.AssertEqual(t, t0.UnixNano(), r1.Timestamp.UnixNano()) | ||||
| 	} | ||||
|  | ||||
| 	{ | ||||
| 		r2, err := QuerySingle[RequestData](ctx, db, "SELECT * FROM requests WHERE id = '002'", PP{}, SModeExtended, Safe) | ||||
| 		tst.AssertNoErr(t, err) | ||||
| 		fmt.Printf("%+v\n", r2) | ||||
| 		tst.AssertEqual(t, "002", r2.ID) | ||||
| 		tst.AssertEqual(t, nil, r2.Timestamp) | ||||
| 	} | ||||
| } | ||||
|   | ||||
| @@ -7,6 +7,7 @@ import ( | ||||
| 	"github.com/jmoiron/sqlx/reflectx" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| 	"reflect" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| // forked from sqlx, but added ability to unmarshal optional-nested structs | ||||
| @@ -18,7 +19,7 @@ type StructScanner struct { | ||||
|  | ||||
| 	fields    [][]int | ||||
| 	values    []any | ||||
| 	converter []DBTypeConverter | ||||
| 	converter []ssConverter | ||||
| 	columns   []string | ||||
| } | ||||
|  | ||||
| @@ -30,6 +31,11 @@ func NewStructScanner(rows *sqlx.Rows, unsafe bool) *StructScanner { | ||||
| 	} | ||||
| } | ||||
|  | ||||
| type ssConverter struct { | ||||
| 	Converter DBTypeConverter | ||||
| 	RefCount  int | ||||
| } | ||||
|  | ||||
| func (r *StructScanner) Start(dest any) error { | ||||
| 	v := reflect.ValueOf(dest) | ||||
|  | ||||
| @@ -49,7 +55,7 @@ func (r *StructScanner) Start(dest any) error { | ||||
| 		return fmt.Errorf("missing destination name %s in %T", columns[f], dest) | ||||
| 	} | ||||
| 	r.values = make([]interface{}, len(columns)) | ||||
| 	r.converter = make([]DBTypeConverter, len(columns)) | ||||
| 	r.converter = make([]ssConverter, len(columns)) | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
| @@ -143,13 +149,19 @@ func (r *StructScanner) StructScanExt(q Queryable, dest any) error { | ||||
|  | ||||
| 			f.Set(reflect.Zero(f.Type())) // set to nil | ||||
| 		} else { | ||||
| 			if r.converter[i] != nil { | ||||
| 				val3 := val2.Elem().Interface() | ||||
| 				conv3, err := r.converter[i].DBToModel(val3) | ||||
| 			if r.converter[i].Converter != nil { | ||||
| 				val3 := val2.Elem() | ||||
| 				conv3, err := r.converter[i].Converter.DBToModel(val3.Interface()) | ||||
| 				if err != nil { | ||||
| 					return err | ||||
| 				} | ||||
| 				f.Set(reflect.ValueOf(conv3)) | ||||
| 				conv3RVal := reflect.ValueOf(conv3) | ||||
| 				for j := 0; j < r.converter[i].RefCount; j++ { | ||||
| 					newConv3Val := reflect.New(conv3RVal.Type()) | ||||
| 					newConv3Val.Elem().Set(conv3RVal) | ||||
| 					conv3RVal = newConv3Val | ||||
| 				} | ||||
| 				f.Set(conv3RVal) | ||||
| 			} else { | ||||
| 				f.Set(val2.Elem()) | ||||
| 			} | ||||
| @@ -184,7 +196,7 @@ func (r *StructScanner) StructScanBase(dest any) error { | ||||
| } | ||||
|  | ||||
| // fieldsByTraversal forked from github.com/jmoiron/sqlx@v1.3.5/sqlx.go | ||||
| func fieldsByTraversalExtended(q Queryable, v reflect.Value, traversals [][]int, values []interface{}, converter []DBTypeConverter) error { | ||||
| func fieldsByTraversalExtended(q Queryable, v reflect.Value, traversals [][]int, values []interface{}, converter []ssConverter) error { | ||||
| 	v = reflect.Indirect(v) | ||||
| 	if v.Kind() != reflect.Struct { | ||||
| 		return errors.New("argument not a struct") | ||||
| @@ -205,14 +217,26 @@ func fieldsByTraversalExtended(q Queryable, v reflect.Value, traversals [][]int, | ||||
| 				_v := langext.Ptr[any](nil) | ||||
| 				values[i] = _v | ||||
| 				foundConverter = true | ||||
| 				converter[i] = conv | ||||
| 				converter[i] = ssConverter{Converter: conv, RefCount: 0} | ||||
| 				break | ||||
| 			} | ||||
| 		} | ||||
| 		if !foundConverter { | ||||
| 			// also allow non-pointer converter for pointer-types | ||||
| 			for _, conv := range q.ListConverter() { | ||||
| 				if conv.ModelTypeString() == strings.TrimLeft(typeStr, "*") { | ||||
| 					_v := langext.Ptr[any](nil) | ||||
| 					values[i] = _v | ||||
| 					foundConverter = true | ||||
| 					converter[i] = ssConverter{Converter: conv, RefCount: len(typeStr) - len(strings.TrimLeft(typeStr, "*"))} // kinda hacky way to count the pointer indirections in front of the field type, but it works... | ||||
| 					break | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		if !foundConverter { | ||||
| 			values[i] = reflect.New(reflect.PointerTo(f.Type())).Interface() | ||||
| 			converter[i] = nil | ||||
| 			converter[i] = ssConverter{Converter: nil, RefCount: -1} | ||||
| 		} | ||||
| 	} | ||||
| 	return nil | ||||
|   | ||||
| @@ -4,6 +4,7 @@ import ( | ||||
| 	"context" | ||||
| 	"database/sql" | ||||
| 	"github.com/jmoiron/sqlx" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/exerr" | ||||
| 	"gogs.mikescher.com/BlackForestBytes/goext/langext" | ||||
| ) | ||||
|  | ||||
| @@ -48,7 +49,7 @@ func (tx *transaction) Rollback() error { | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		err := v.PreTxRollback(tx.id) | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 			return exerr.Wrap(err, "failed to call SQL pre-rollback listener").Int("tx.id", int(tx.id)).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -69,7 +70,7 @@ func (tx *transaction) Commit() error { | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		err := v.PreTxCommit(tx.id) | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 			return exerr.Wrap(err, "failed to call SQL pre-commit listener").Int("tx.id", int(tx.id)).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -91,7 +92,7 @@ func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Re | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		err := v.PreExec(ctx, langext.Ptr(tx.id), &sqlstr, &prep) | ||||
| 		if err != nil { | ||||
| 			return nil, err | ||||
| 			return nil, exerr.Wrap(err, "failed to call SQL pre-exec listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -106,7 +107,7 @@ func (tx *transaction) Exec(ctx context.Context, sqlstr string, prep PP) (sql.Re | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 		return nil, exerr.Wrap(err, "Failed to [exec] sql statement").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 	} | ||||
| 	return res, nil | ||||
| } | ||||
| @@ -116,7 +117,7 @@ func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx | ||||
| 	for _, v := range tx.db.lstr { | ||||
| 		err := v.PreQuery(ctx, langext.Ptr(tx.id), &sqlstr, &prep) | ||||
| 		if err != nil { | ||||
| 			return nil, err | ||||
| 			return nil, exerr.Wrap(err, "failed to call SQL pre-query listener").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @@ -131,7 +132,7 @@ func (tx *transaction) Query(ctx context.Context, sqlstr string, prep PP) (*sqlx | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 		return nil, exerr.Wrap(err, "Failed to [query] sql statement").Int("tx.id", int(tx.id)).Str("original_sql", origsql).Str("sql", sqlstr).Any("sql_params", prep).Build() | ||||
| 	} | ||||
| 	return rows, nil | ||||
| } | ||||
|   | ||||
| @@ -9,7 +9,7 @@ import ( | ||||
| 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination" | ||||
| ) | ||||
|  | ||||
| func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.Filter, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||
| func (c *Coll[TData]) Paginate(ctx context.Context, filter pag.MongoFilter, page int, limit *int) ([]TData, pag.Pagination, error) { | ||||
| 	type totalCountResult struct { | ||||
| 		Count int `bson:"count"` | ||||
| 	} | ||||
|   | ||||