Compare commits

13 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 8edc067a3b |  |
|  | 1007f2c834 |  |
|  | c25da03217 |  |
|  | 4b55dbaacf |  |
|  | c399fa42ae |  |
|  | 9e586f7706 |  |
|  | 3cc8dccc63 |  |
|  | 7fedfbca81 |  |
|  | 3c439ba428 |  |
|  | ad24f6db44 |  |
|  | 1869ff3d75 |  |
|  | 30ce8c4b60 |  |
|  | 885bb53244 |  |
@@ -53,15 +53,11 @@ func (w *GinRoutesWrapper) Group(relativePath string) *GinRoutesWrapper {
 func (w *GinRoutesWrapper) Use(middleware ...gin.HandlerFunc) *GinRoutesWrapper {
 	defHandler := langext.ArrCopy(w.defaultHandler)
 	defHandler = append(defHandler, middleware...)
-	return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler}
+	return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler, absPath: w.absPath}
 }

 func (w *GinRoutesWrapper) WithJSONFilter(filter string) *GinRoutesWrapper {
-	defHandler := langext.ArrCopy(w.defaultHandler)
-	defHandler = append(defHandler, func(g *gin.Context) {
-		g.Set("goext.jsonfilter", filter)
-	})
-	return &GinRoutesWrapper{wrapper: w.wrapper, routes: w.routes, defaultHandler: defHandler}
+	return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) })
 }

 func (w *GinRoutesWrapper) GET(relativePath string) *GinRouteBuilder {
@@ -116,10 +112,7 @@ func (w *GinRouteBuilder) Use(middleware ...gin.HandlerFunc) *GinRouteBuilder {
 }

 func (w *GinRouteBuilder) WithJSONFilter(filter string) *GinRouteBuilder {
-	w.handlers = append(w.handlers, func(g *gin.Context) {
-		g.Set("goext.jsonfilter", filter)
-	})
-	return w
+	return w.Use(func(g *gin.Context) { g.Set("goext.jsonfilter", filter) })
 }

 func (w *GinRouteBuilder) Handle(handler WHandlerFunc) {
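Both WithJSONFilter implementations now delegate to Use, so the JSON filter is just another middleware that stores the filter name under the "goext.jsonfilter" context key. A minimal wiring sketch under assumptions: the wrapper is taken to live in a ginext package of this module, and the function and handler names are invented for illustration; only Group, Use, WithJSONFilter, GET and Handle come from the hunks above.

package webexample // hypothetical, for illustration only

import (
	"gogs.mikescher.com/BlackForestBytes/goext/ginext" // assumed package path for the wrapper shown above
)

func registerRoutes(routes *ginext.GinRoutesWrapper, handleListUsers ginext.WHandlerFunc) {
	// group-wide filter, now added via Use(...) like any other middleware
	api := routes.Group("/api").WithJSONFilter("default")

	api.GET("/users").
		WithJSONFilter("user-list"). // per-route override, also routed through Use(...)
		Handle(handleListUsers)      // the handler can read the value via g.GetString("goext.jsonfilter")
}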
10  go.mod
@@ -8,14 +8,14 @@ require (
 	github.com/jmoiron/sqlx v1.3.5
 	github.com/rs/xid v1.5.0
 	github.com/rs/zerolog v1.32.0
-	go.mongodb.org/mongo-driver v1.13.1
+	go.mongodb.org/mongo-driver v1.14.0
 	golang.org/x/crypto v0.19.0
 	golang.org/x/sys v0.17.0
 	golang.org/x/term v0.17.0
 )

 require (
-	github.com/bytedance/sonic v1.10.2 // indirect
+	github.com/bytedance/sonic v1.11.0 // indirect
 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
 	github.com/chenzhuoyu/iasm v0.9.1 // indirect
 	github.com/dustin/go-humanize v1.0.1 // indirect
@@ -23,13 +23,13 @@ require (
 	github.com/gin-contrib/sse v0.1.0 // indirect
 	github.com/go-playground/locales v0.14.1 // indirect
 	github.com/go-playground/universal-translator v0.18.1 // indirect
-	github.com/go-playground/validator/v10 v10.17.0 // indirect
+	github.com/go-playground/validator/v10 v10.18.0 // indirect
 	github.com/goccy/go-json v0.10.2 // indirect
 	github.com/golang/snappy v0.0.4 // indirect
 	github.com/google/uuid v1.5.0 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
-	github.com/klauspost/compress v1.17.6 // indirect
-	github.com/klauspost/cpuid/v2 v2.2.6 // indirect
+	github.com/klauspost/compress v1.17.7 // indirect
+	github.com/klauspost/cpuid/v2 v2.2.7 // indirect
 	github.com/leodido/go-urn v1.4.0 // indirect
 	github.com/mattn/go-colorable v0.1.13 // indirect
 	github.com/mattn/go-isatty v0.0.20 // indirect

10  go.sum
@@ -2,6 +2,8 @@ github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1
 github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
 github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE=
 github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
+github.com/bytedance/sonic v1.11.0 h1:FwNNv6Vu4z2Onf1++LNzxB/QhitD8wuTdpZzMTGITWo=
+github.com/bytedance/sonic v1.11.0/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
 github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
 github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
 github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0=
@@ -33,6 +35,8 @@ github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqR
 github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
 github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74=
 github.com/go-playground/validator/v10 v10.17.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
+github.com/go-playground/validator/v10 v10.18.0 h1:BvolUXjp4zuvkZ5YN5t7ebzbhlUtPsPm2S9NAZ5nl9U=
+github.com/go-playground/validator/v10 v10.18.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
 github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
 github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
 github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
@@ -59,9 +63,13 @@ github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW
 github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
 github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI=
 github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
+github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg=
+github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
 github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
 github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc=
 github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
+github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
+github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
 github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
 github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
 github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
@@ -126,6 +134,8 @@ github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/
 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
 go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk=
 go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo=
+go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80=
+go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c=
 golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
 golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc=
 golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
@@ -1,5 +1,5 @@
 package goext

-const GoextVersion = "0.0.382"
+const GoextVersion = "0.0.394"

-const GoextVersionTimestamp = "2024-02-09T12:25:01+0100"
+const GoextVersionTimestamp = "2024-02-21T18:40:42+0100"
@@ -265,6 +265,15 @@ func ArrFirstIndex[T comparable](arr []T, needle T) int {
 	return -1
 }

+func ArrFirstIndexFunc[T any](arr []T, comp func(v T) bool) int {
+	for i, v := range arr {
+		if comp(v) {
+			return i
+		}
+	}
+	return -1
+}
+
 func ArrLastIndex[T comparable](arr []T, needle T) int {
 	result := -1
 	for i, v := range arr {
@@ -275,6 +284,16 @@ func ArrLastIndex[T comparable](arr []T, needle T) int {
 	return result
 }

+func ArrLastIndexFunc[T any](arr []T, comp func(v T) bool) int {
+	result := -1
+	for i, v := range arr {
+		if comp(v) {
+			result = i
+		}
+	}
+	return result
+}
+
 func AddToSet[T comparable](set []T, add T) []T {
 	for _, v := range set {
 		if v == add {
@@ -479,3 +498,33 @@ func JoinString(arr []string, delimiter string) string {

 	return str
 }
+
+// ArrChunk splits the array into buckets of at most `chunkSize` elements each;
+// the order of elements is kept.
+// The last chunk may contain fewer than chunkSize elements.
+//
+// (chunkSize == -1) means no chunking
+//
+// see https://www.php.net/manual/en/function.array-chunk.php
+func ArrChunk[T any](arr []T, chunkSize int) [][]T {
+	if chunkSize == -1 {
+		return [][]T{arr}
+	}
+
+	res := make([][]T, 0, 1+len(arr)/chunkSize)
+
+	i := 0
+	for i < len(arr) {
+
+		right := i + chunkSize
+		if right >= len(arr) {
+			right = len(arr)
+		}
+
+		res = append(res, arr[i:right])
+
+		i = right
+	}
+
+	return res
+}
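The new array helpers are pure functions over slices, so their behaviour follows directly from the code above. A minimal sketch (values chosen arbitrarily):

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
	arr := []int{3, 8, 2, 8, 5}

	// index of the first / last element greater than 6
	fmt.Println(langext.ArrFirstIndexFunc(arr, func(v int) bool { return v > 6 })) // 1
	fmt.Println(langext.ArrLastIndexFunc(arr, func(v int) bool { return v > 6 }))  // 3

	// split into buckets of at most 2 elements, order preserved
	fmt.Println(langext.ArrChunk(arr, 2)) // [[3 8] [2 8] [5]]

	// chunkSize == -1 disables chunking
	fmt.Println(langext.ArrChunk(arr, -1)) // [[3 8 2 8 5]]
}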
45  langext/url_test.go  Normal file
@@ -0,0 +1,45 @@
package langext

import (
	"gogs.mikescher.com/BlackForestBytes/goext/tst"
	"testing"
)

func TestBuildUrl(t *testing.T) {
	tests := []struct {
		Url    string
		Path   string
		Params *map[string]string
		Want   string
	}{
		{
			Url:    "https://test.heydyno.de/",
			Path:   "/testing-01",
			Params: &map[string]string{"param1": "value1"},
			Want:   "https://test.heydyno.de/testing-01?param1=value1",
		},
		{
			Url:    "https://test.heydyno.de",
			Path:   "testing-01",
			Params: &map[string]string{"param1": "value1"},
			Want:   "https://test.heydyno.de/testing-01?param1=value1",
		},
		{
			Url:    "https://test.heydyno.de",
			Path:   "/testing-01",
			Params: nil,
			Want:   "https://test.heydyno.de/testing-01",
		},
		{
			Url:    "https://test.heydyno.de/",
			Path:   "testing-01",
			Params: nil,
			Want:   "https://test.heydyno.de/testing-01",
		},
	}

	for _, test := range tests {
		res := BuildUrl(test.Url, test.Path, test.Params)
		tst.AssertEqual(t, res, test.Want)
	}
}

148  rfctime/time.go  Normal file
@@ -0,0 +1,148 @@
package rfctime

import (
	"fmt"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"strconv"
	"strings"
	"time"
)

type Time struct {
	Hour       int
	Minute     int
	Second     int
	NanoSecond int
}

func (t Time) Serialize() string {
	return fmt.Sprintf("%02d:%02d:%02d.%09d", t.Hour, t.Minute, t.Second, t.NanoSecond)
}

func (t Time) SerializeShort() string {
	if t.NanoSecond == 0 && t.Second == 0 {
		return fmt.Sprintf("%02d:%02d", t.Hour, t.Minute)
	} else if t.NanoSecond == 0 {
		return fmt.Sprintf("%02d:%02d:%02d", t.Hour, t.Minute, t.Second)
	} else {
		return fmt.Sprintf("%02d:%02d:%02d.%09d", t.Hour, t.Minute, t.Second, t.NanoSecond)
	}
}

func (t *Time) Deserialize(v string) error {

	var h, m, s, ns string

	split1 := strings.Split(v, ".")

	if len(split1) == 2 {

		split2 := strings.Split(split1[0], ":")
		if len(split2) == 3 {

			h = split2[0]
			m = split2[1]
			s = split2[2]
			ns = split1[1]

		} else {
			return fmt.Errorf("invalid time format: %s", v)
		}

	} else if len(split1) == 1 {

		split2 := strings.Split(split1[0], ":")
		if len(split2) == 2 {

			h = split2[0]
			m = split2[1]
			s = "0"
			ns = "0"

		} else if len(split2) == 3 {

			h = split2[0]
			m = split2[1]
			s = split2[2]
			ns = "0"

		} else {
			return fmt.Errorf("invalid time format: %s", v)
		}

	} else {
		return fmt.Errorf("invalid time format: %s", v)
	}

	ns = langext.StrPadRight(ns, "0", 9)

	hh, err := strconv.ParseInt(h, 10, 32)
	if err != nil {
		return fmt.Errorf("invalid time format: %s", v)
	}

	mm, err := strconv.ParseInt(m, 10, 32)
	if err != nil {
		return fmt.Errorf("invalid time format: %s", v)
	}

	ss, err := strconv.ParseInt(s, 10, 32)
	if err != nil {
		return fmt.Errorf("invalid time format: %s", v)
	}

	nss, err := strconv.ParseInt(ns, 10, 32)
	if err != nil {
		return fmt.Errorf("invalid time format: %s", v)
	}

	t.Hour = int(hh)
	t.Minute = int(mm)
	t.Second = int(ss)
	t.NanoSecond = int(nss)

	return nil
}

func (t Time) FormatStr() string {
	return "15:04:05.999999999"
}

func (t Time) GoString() string {
	return fmt.Sprintf("rfctime.NewTime(%d, %d, %d, %d)", t.Hour, t.Minute, t.Second, t.NanoSecond)
}

func (t Time) String() string {
	return fmt.Sprintf("%02d:%02d:%02d.%09d", t.Hour, t.Minute, t.Second, t.NanoSecond)
}

func NewTime(h int, m int, s int, ns int) Time {
	return Time{
		Hour:       h,
		Minute:     m,
		Second:     s,
		NanoSecond: ns,
	}
}

func NewTimeFromTS(t time.Time) Time {
	return Time{
		Hour:       t.Hour(),
		Minute:     t.Minute(),
		Second:     t.Second(),
		NanoSecond: t.Nanosecond(),
	}
}

func NowTime(loc *time.Location) Time {
	now := time.Now().In(loc)
	return NewTime(now.Hour(), now.Minute(), now.Second(), now.Nanosecond())
}

func NowTimeLoc() Time {
	return NowTime(time.Local)
}

func NowTimeUTC() Time {
	return NowTime(time.UTC)
}
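A quick round-trip sketch of the new rfctime.Time type, based only on the constructors and the Serialize/Deserialize logic shown above:

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/rfctime"
)

func main() {
	t := rfctime.NewTime(18, 40, 42, 0)

	fmt.Println(t.Serialize())      // 18:40:42.000000000
	fmt.Println(t.SerializeShort()) // 18:40:42  (seconds set, nanoseconds zero)

	var parsed rfctime.Time
	if err := parsed.Deserialize("18:40"); err != nil { // "HH:MM" is accepted, seconds/nanos default to 0
		panic(err)
	}
	fmt.Println(parsed.SerializeShort()) // 18:40
}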
@@ -1,13 +1,14 @@
 package sq

 import (
+	"errors"
 	"fmt"
 	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
 	"reflect"
 	"strings"
 )

-func BuildUpdateStatement(q Queryable, tableName string, obj any, idColumn string) (string, PP, error) {
+func BuildUpdateStatement[TData any](q Queryable, tableName string, obj TData, idColumn string) (string, PP, error) {
 	rval := reflect.ValueOf(obj)
 	rtyp := rval.Type()

@@ -70,7 +71,7 @@ func BuildUpdateStatement(q Queryable, tableName string, obj any, idColumn string) (string, PP, error) {
 	return fmt.Sprintf("UPDATE %s SET %s WHERE %s", tableName, strings.Join(setClauses, ", "), matchClause), params, nil
 }

-func BuildInsertStatement(q Queryable, tableName string, obj any) (string, PP, error) {
+func BuildInsertStatement[TData any](q Queryable, tableName string, obj TData) (string, PP, error) {
 	rval := reflect.ValueOf(obj)
 	rtyp := rval.Type()

@@ -118,3 +119,81 @@ func BuildInsertStatement(q Queryable, tableName string, obj any) (string, PP, error) {
 	//goland:noinspection SqlNoDataSourceInspection
 	return fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", tableName, strings.Join(fields, ", "), strings.Join(values, ", ")), params, nil
 }
+
+func BuildInsertMultipleStatement[TData any](q Queryable, tableName string, vArr []TData) (string, PP, error) {
+
+	if len(vArr) == 0 {
+		return "", nil, errors.New("no data supplied")
+	}
+
+	rtyp := reflect.ValueOf(vArr[0]).Type()
+
+	sqlPrefix := ""
+	{
+		columns := make([]string, 0)
+
+		for i := 0; i < rtyp.NumField(); i++ {
+			rsfield := rtyp.Field(i)
+
+			if !rsfield.IsExported() {
+				continue
+			}
+
+			columnName := rsfield.Tag.Get("db")
+			if columnName == "" || columnName == "-" {
+				continue
+			}
+
+			columns = append(columns, "\""+columnName+"\"")
+		}
+
+		sqlPrefix = fmt.Sprintf("INSERT"+" INTO \"%s\" (%s) VALUES", tableName, strings.Join(columns, ", "))
+	}
+
+	pp := PP{}
+
+	sqlValuesArr := make([]string, 0)
+
+	for _, v := range vArr {
+
+		rval := reflect.ValueOf(v)
+
+		params := make([]string, 0)
+
+		for i := 0; i < rtyp.NumField(); i++ {
+
+			rsfield := rtyp.Field(i)
+			rvfield := rval.Field(i)
+
+			if !rsfield.IsExported() {
+				continue
+			}
+
+			columnName := rsfield.Tag.Get("db")
+			if columnName == "" || columnName == "-" {
+				continue
+			}
+
+			if rsfield.Type.Kind() == reflect.Ptr && rvfield.IsNil() {
+
+				params = append(params, "NULL")
+
+			} else {
+
+				val, err := convertValueToDB(q, rvfield.Interface())
+				if err != nil {
+					return "", nil, err
+				}
+
+				params = append(params, ":"+pp.Add(val))
+
+			}
+		}
+
+		sqlValuesArr = append(sqlValuesArr, fmt.Sprintf("(%s)", strings.Join(params, ", ")))
+	}
+
+	sqlstr := fmt.Sprintf("%s %s", sqlPrefix, strings.Join(sqlValuesArr, ", "))

+	return sqlstr, pp, nil
+}
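For context, a sketch of how the new multi-row builder is meant to be called. The struct, table name and function are made up; the generated-SQL shape in the comment follows from the code above (parameter names come from pp.Add, i.e. "p_" plus a random base62 suffix, and nil pointers become a literal NULL).

package sqexample // hypothetical helper package, for illustration only

import (
	"gogs.mikescher.com/BlackForestBytes/goext/sq"
)

// person is a made-up struct; only the `db` tags matter to the statement builder.
type person struct {
	ID   string `db:"id"`
	Name string `db:"name"`
	Age  *int   `db:"age"` // nil pointers are emitted as NULL
}

// buildBulkInsert shows the intended call; for two persons the result has roughly the shape
//   INSERT INTO "persons" ("id", "name", "age") VALUES (:p_aaaaaaaa, :p_bbbbbbbb, NULL), (:p_cccccccc, :p_dddddddd, :p_eeeeeeee)
// with the concrete values collected in the returned PP map.
func buildBulkInsert(q sq.Queryable, persons []person) (string, sq.PP, error) {
	return sq.BuildInsertMultipleStatement(q, "persons", persons)
}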
@@ -8,6 +8,8 @@ import (
 	"gogs.mikescher.com/BlackForestBytes/goext/langext"
 	"gogs.mikescher.com/BlackForestBytes/goext/rfctime"
 	"reflect"
+	"strconv"
+	"strings"
 	"time"
 )

@@ -78,6 +80,40 @@ var ConverterRFC339NanoTimeToString = NewDBTypeConverter[rfctime.RFC3339NanoTime
 	return rfctime.NewRFC3339Nano(t), nil
 })

+var ConverterRFCDateToString = NewDBTypeConverter[rfctime.Date, string](func(v rfctime.Date) (string, error) {
+	return fmt.Sprintf("%04d-%02d-%02d", v.Year, v.Month, v.Day), nil
+}, func(v string) (rfctime.Date, error) {
+	split := strings.Split(v, "-")
+	if len(split) != 3 {
+		return rfctime.Date{}, errors.New("invalid date format: " + v)
+	}
+	year, err := strconv.ParseInt(split[0], 10, 32)
+	if err != nil {
+		return rfctime.Date{}, errors.New("invalid date format: " + v + ": " + err.Error())
+	}
+	month, err := strconv.ParseInt(split[1], 10, 32)
+	if err != nil {
+		return rfctime.Date{}, errors.New("invalid date format: " + v + ": " + err.Error())
+	}
+	day, err := strconv.ParseInt(split[2], 10, 32)
+	if err != nil {
+		return rfctime.Date{}, errors.New("invalid date format: " + v + ": " + err.Error())
+	}
+
+	return rfctime.Date{Year: int(year), Month: int(month), Day: int(day)}, nil
+})
+
+var ConverterRFCTimeToString = NewDBTypeConverter[rfctime.Time, string](func(v rfctime.Time) (string, error) {
+	return v.SerializeShort(), nil
+}, func(v string) (rfctime.Time, error) {
+	res := rfctime.Time{}
+	err := res.Deserialize(v)
+	if err != nil {
+		return rfctime.Time{}, err
+	}
+	return res, nil
+})
+
 var ConverterJsonObjToString = NewDBTypeConverter[JsonObj, string](func(v JsonObj) (string, error) {
 	mrsh, err := json.Marshal(v)
 	if err != nil {
@@ -154,4 +154,6 @@ func (db *database) RegisterDefaultConverter() {
 	db.RegisterConverter(ConverterExErrCategoryToString)
 	db.RegisterConverter(ConverterExErrSeverityToString)
 	db.RegisterConverter(ConverterExErrTypeToString)
+	db.RegisterConverter(ConverterRFCDateToString)
+	db.RegisterConverter(ConverterRFCTimeToString)
 }

49  sq/filter.go  Normal file
@@ -0,0 +1,49 @@
package sq

import ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"

type FilterSort struct {
	Field     string
	Direction ct.SortDirection
}

type PaginateFilter interface {
	SQL(params PP) (filterClause string, joinClause string, joinTables []string)
	Sort() []FilterSort
}

type genericPaginateFilter struct {
	sql  func(params PP) (filterClause string, joinClause string, joinTables []string)
	sort func() []FilterSort
}

func (g genericPaginateFilter) SQL(params PP) (filterClause string, joinClause string, joinTables []string) {
	return g.sql(params)
}

func (g genericPaginateFilter) Sort() []FilterSort {
	return g.sort()
}

func NewPaginateFilter(sql func(params PP) (filterClause string, joinClause string, joinTables []string), sort []FilterSort) PaginateFilter {
	return genericPaginateFilter{
		sql: func(params PP) (filterClause string, joinClause string, joinTables []string) {
			return sql(params)
		},
		sort: func() []FilterSort {
			return sort
		},
	}
}

func NewSimplePaginateFilter(filterClause string, filterParams PP, sort []FilterSort) PaginateFilter {
	return genericPaginateFilter{
		sql: func(params PP) (string, string, []string) {
			params.AddAll(filterParams)
			return filterClause, "", nil
		},
		sort: func() []FilterSort {
			return sort
		},
	}
}
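A short sketch of how the extracted filter type plugs into Paginate. The table name, column and clause are invented for illustration; the types and function signatures are the ones visible in this compare view.

package sqexample // hypothetical, for illustration only

import (
	"context"

	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination"
	"gogs.mikescher.com/BlackForestBytes/goext/sq"
)

type request struct {
	ID        string `db:"id"`
	Timestamp int    `db:"timestamp"`
}

// listRecentRequests pages through rows newer than minTS (no explicit sort here).
func listRecentRequests(ctx context.Context, q sq.Queryable, minTS int, page int) ([]request, pag.Pagination, error) {
	filter := sq.NewSimplePaginateFilter("timestamp >= :min_ts", sq.PP{"min_ts": minTS}, nil)
	return sq.Paginate[request](ctx, q, "requests", filter, sq.SModeExtended, sq.Safe, page, nil)
}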
15  sq/main_test.go  Normal file
@@ -0,0 +1,15 @@
package sq

import (
	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"os"
	"testing"
)

func TestMain(m *testing.M) {
	if !exerr.Initialized() {
		exerr.Init(exerr.ErrorPackageConfigInit{ZeroLogErrTraces: langext.PFalse, ZeroLogAllTraces: langext.PFalse})
	}
	os.Exit(m.Run())
}
@@ -3,22 +3,11 @@ package sq
 import (
 	"context"
 	"fmt"
-	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
 	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
 	"gogs.mikescher.com/BlackForestBytes/goext/langext"
 	pag "gogs.mikescher.com/BlackForestBytes/goext/pagination"
 )

-type PaginateFilter interface {
-	SQL(params PP) (filterClause string, joinClause string, joinTables []string)
-	Sort() []FilterSort
-}
-
-type FilterSort struct {
-	Field     string
-	Direction ct.SortDirection
-}
-
 func Paginate[TData any](ctx context.Context, q Queryable, table string, filter PaginateFilter, scanMode StructScanMode, scanSec StructScanSafety, page int, limit *int) ([]TData, pag.Pagination, error) {
 	prepParams := PP{}

@@ -20,6 +20,12 @@ func (pp *PP) Add(v any) string {
 	return id
 }

+func (pp *PP) AddAll(other PP) {
+	for id, v := range other {
+		(*pp)[id] = v
+	}
+}
+
 func PPID() string {
 	return "p_" + langext.RandBase62(8)
 }

179  sq/scanner.go
@@ -6,8 +6,9 @@ import (
 	"errors"
 	"fmt"
 	"github.com/jmoiron/sqlx"
+	"gogs.mikescher.com/BlackForestBytes/goext/exerr"
+	"gogs.mikescher.com/BlackForestBytes/goext/langext"
 	"reflect"
-	"strings"
 )

 type StructScanMode string
@@ -26,43 +27,11 @@ const (

 func InsertSingle[TData any](ctx context.Context, q Queryable, tableName string, v TData) (sql.Result, error) {

-	rval := reflect.ValueOf(v)
-	rtyp := rval.Type()
-
-	columns := make([]string, 0)
-	params := make([]string, 0)
-	pp := PP{}
-
-	for i := 0; i < rtyp.NumField(); i++ {
-
-		rsfield := rtyp.Field(i)
-		rvfield := rval.Field(i)
-
-		if !rsfield.IsExported() {
-			continue
-		}
-
-		columnName := rsfield.Tag.Get("db")
-		if columnName == "" || columnName == "-" {
-			continue
-		}
-
-		paramkey := fmt.Sprintf("_%s", columnName)
-
-		columns = append(columns, "\""+columnName+"\"")
-		params = append(params, ":"+paramkey)
-
-		val, err := convertValueToDB(q, rvfield.Interface())
-		if err != nil {
-			return nil, err
-		}
-
-		pp[paramkey] = val
+	sqlstr, pp, err := BuildInsertStatement(q, tableName, v)
+	if err != nil {
+		return nil, err
 	}

-	sqlstr := fmt.Sprintf("INSERT"+" INTO \"%s\" (%s) VALUES (%s)", tableName, strings.Join(columns, ", "), strings.Join(params, ", "))
-
 	sqlr, err := q.Exec(ctx, sqlstr, pp)
 	if err != nil {
 		return nil, err
@@ -71,6 +40,127 @@ func InsertSingle[TData any](ctx context.Context, q Queryable, tableName string, v TData) (sql.Result, error) {
 	return sqlr, nil
 }

+func InsertAndQuerySingle[TData any](ctx context.Context, q Queryable, tableName string, v TData, idColumn string, mode StructScanMode, sec StructScanSafety) (TData, error) {
+
+	rval := reflect.ValueOf(v)
+
+	idRVal := fieldByTag(rval, "db", idColumn)
+	if !idRVal.IsValid() || idRVal.IsZero() {
+		return *new(TData), fmt.Errorf("failed to find idColumn '%s' in %T", idColumn, v)
+	}
+
+	idValue, err := convertValueToDB(q, idRVal.Interface())
+	if err != nil {
+		return *new(TData), err
+	}
+
+	_, err = InsertSingle[TData](ctx, q, tableName, v)
+	if err != nil {
+		return *new(TData), err
+	}
+
+	pp := PP{}
+
+	//goland:noinspection ALL
+	sqlstr := fmt.Sprintf("SELECT * FROM %s WHERE %s = :%s", tableName, idColumn, pp.Add(idValue))
+
+	return QuerySingle[TData](ctx, q, sqlstr, pp, mode, sec)
+}
+
+func fieldByTag(rval reflect.Value, tagkey string, tagval string) reflect.Value {
+	rtyp := rval.Type()
+	for i := 0; i < rtyp.NumField(); i++ {
+		rsfield := rtyp.Field(i)
+
+		if !rsfield.IsExported() {
+			continue
+		}
+
+		if rsfield.Tag.Get(tagkey) == tagval {
+			return rval.Field(i)
+		}
+	}
+	panic(fmt.Sprintf("tag %s = '%s' not found in %s", tagkey, tagval, rtyp.Name()))
+}
+
+func InsertMultiple[TData any](ctx context.Context, q Queryable, tableName string, vArr []TData, maxBatch int) ([]sql.Result, error) {
+
+	if len(vArr) == 0 {
+		return make([]sql.Result, 0), nil
+	}
+
+	chunks := langext.ArrChunk(vArr, maxBatch)
+
+	sqlstrArr := make([]string, 0)
+	ppArr := make([]PP, 0)
+
+	for _, chunk := range chunks {
+
+		sqlstr, pp, err := BuildInsertMultipleStatement(q, tableName, chunk)
+		if err != nil {
+			return nil, exerr.Wrap(err, "").Build()
+		}
+
+		sqlstrArr = append(sqlstrArr, sqlstr)
+		ppArr = append(ppArr, pp)
+	}
+
+	res := make([]sql.Result, 0, len(sqlstrArr))
+
+	for i := 0; i < len(sqlstrArr); i++ {
+		sqlr, err := q.Exec(ctx, sqlstrArr[i], ppArr[i])
+		if err != nil {
+			return nil, err
+		}
+
+		res = append(res, sqlr)
+	}
+
+	return res, nil
+}
+
+func UpdateSingle[TData any](ctx context.Context, q Queryable, tableName string, v TData, idColumn string) (sql.Result, error) {
+
+	sqlstr, pp, err := BuildUpdateStatement(q, tableName, v, idColumn)
+	if err != nil {
+		return nil, err
+	}
+
+	sqlr, err := q.Exec(ctx, sqlstr, pp)
+	if err != nil {
+		return nil, err
+	}
+
+	return sqlr, nil
+}
+
+func UpdateAndQuerySingle[TData any](ctx context.Context, q Queryable, tableName string, v TData, idColumn string, mode StructScanMode, sec StructScanSafety) (TData, error) {
+
+	rval := reflect.ValueOf(v)
+
+	idRVal := fieldByTag(rval, "db", idColumn)
+	if !idRVal.IsValid() || idRVal.IsZero() {
+		return *new(TData), fmt.Errorf("failed to find idColumn '%s' in %T", idColumn, v)
+	}
+
+	idValue, err := convertValueToDB(q, idRVal.Interface())
+	if err != nil {
+		return *new(TData), err
+	}
+
+	_, err = UpdateSingle[TData](ctx, q, tableName, v, idColumn)
+	if err != nil {
+		return *new(TData), err
+	}
+
+	pp := PP{}
+
+	//goland:noinspection ALL
+	sqlstr := fmt.Sprintf("SELECT * FROM %s WHERE %s = :%s", tableName, idColumn, pp.Add(idValue))
+
+	return QuerySingle[TData](ctx, q, sqlstr, pp, mode, sec)
+}
+
 func QuerySingle[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) (TData, error) {
 	rows, err := q.Query(ctx, sql, pp)
 	if err != nil {
@@ -85,6 +175,23 @@ func QuerySingle[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) (TData, error) {
 	return data, nil
 }

+func QuerySingleOpt[TData any](ctx context.Context, q Queryable, sqlstr string, pp PP, mode StructScanMode, sec StructScanSafety) (*TData, error) {
+	rows, err := q.Query(ctx, sqlstr, pp)
+	if err != nil {
+		return nil, err
+	}
+
+	data, err := ScanSingle[TData](ctx, q, rows, mode, sec, true)
+	if errors.Is(err, sql.ErrNoRows) {
+		return nil, nil
+	}
+	if err != nil {
+		return nil, err
+	}
+
+	return &data, nil
+}
+
 func QueryAll[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) ([]TData, error) {
 	rows, err := q.Query(ctx, sql, pp)
 	if err != nil {
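A compact sketch of the new high-level helpers, using a request-like struct similar to the one in the tests below. Only InsertAndQuerySingle, UpdateAndQuerySingle, QuerySingleOpt and InsertMultiple are taken from the diff; the table, values and wrapper function are illustrative.

package sqexample // hypothetical, for illustration only

import (
	"context"

	"gogs.mikescher.com/BlackForestBytes/goext/sq"
)

type request struct {
	ID        string `db:"id"`
	Timestamp int    `db:"timestamp"`
	StrVal    string `db:"str_val"`
}

func example(ctx context.Context, q sq.Queryable) error {
	// insert a row and immediately read it back ("SELECT * ... WHERE id = :p_xxxxxxxx")
	created, err := sq.InsertAndQuerySingle(ctx, q, "requests", request{ID: "1", Timestamp: 1000, StrVal: "one"}, "id", sq.SModeExtended, sq.Safe)
	if err != nil {
		return err
	}

	// update by id column and read the new state back
	created.StrVal = "one (edited)"
	if _, err := sq.UpdateAndQuerySingle(ctx, q, "requests", created, "id", sq.SModeExtended, sq.Safe); err != nil {
		return err
	}

	// QuerySingleOpt returns (nil, nil) instead of sql.ErrNoRows when nothing matches
	missing, err := sq.QuerySingleOpt[request](ctx, q, "SELECT * FROM requests WHERE id = :id", sq.PP{"id": "does-not-exist"}, sq.SModeExtended, sq.Safe)
	if err != nil {
		return err
	}
	_ = missing // nil here

	// bulk insert, batched into at most 100 rows per statement (-1 would mean a single statement)
	_, err = sq.InsertMultiple(ctx, q, "requests", []request{{ID: "2", Timestamp: 2000, StrVal: "two"}}, 100)
	return err
}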
237  sq/scanner_test.go  Normal file
@@ -0,0 +1,237 @@
package sq

import (
	"context"
	"database/sql"
	"fmt"
	"github.com/glebarez/go-sqlite"
	"github.com/jmoiron/sqlx"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"gogs.mikescher.com/BlackForestBytes/goext/tst"
	"path/filepath"
	"testing"
)

func TestInsertSingle(t *testing.T) {

	type request struct {
		ID        string  `json:"id" db:"id"`
		Timestamp int     `json:"timestamp" db:"timestamp"`
		StrVal    string  `json:"strVal" db:"str_val"`
		FloatVal  float64 `json:"floatVal" db:"float_val"`
		Dummy     bool    `json:"dummyBool" db:"dummy_bool"`
		JsonVal   JsonObj `json:"jsonVal" db:"json_val"`
	}

	if !langext.InArray("sqlite3", sql.Drivers()) {
		sqlite.RegisterAsSQLITE3()
	}

	ctx := context.Background()

	dbdir := t.TempDir()
	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3")

	url := fmt.Sprintf("file:%s?_pragma=journal_mode(%s)&_pragma=timeout(%d)&_pragma=foreign_keys(%s)&_pragma=busy_timeout(%d)", dbfile1, "DELETE", 1000, "true", 1000)

	xdb := tst.Must(sqlx.Open("sqlite", url))(t)

	db := NewDB(xdb)
	db.RegisterDefaultConverter()

	_, err := db.Exec(ctx, `
		CREATE TABLE requests (
			id          TEXT     NOT NULL,
			timestamp   INTEGER  NOT NULL,
			str_val     TEXT     NOT NULL,
			float_val   REAL     NOT NULL,
			dummy_bool  INTEGER  NOT NULL  CHECK(dummy_bool IN (0, 1)),
			json_val    TEXT     NOT NULL,
			PRIMARY KEY (id)
		) STRICT
	`, PP{})
	tst.AssertNoErr(t, err)

	_, err = InsertSingle(ctx, db, "requests", request{
		ID:        "9927",
		Timestamp: 12321,
		StrVal:    "hello world",
		Dummy:     true,
		FloatVal:  3.14159,
		JsonVal: JsonObj{
			"firs":   1,
			"second": true,
		},
	})
	tst.AssertNoErr(t, err)
}

func TestUpdateSingle(t *testing.T) {

	type request struct {
		ID        string  `json:"id" db:"id"`
		Timestamp int     `json:"timestamp" db:"timestamp"`
		StrVal    string  `json:"strVal" db:"str_val"`
		FloatVal  float64 `json:"floatVal" db:"float_val"`
		Dummy     bool    `json:"dummyBool" db:"dummy_bool"`
		JsonVal   JsonObj `json:"jsonVal" db:"json_val"`
	}

	if !langext.InArray("sqlite3", sql.Drivers()) {
		sqlite.RegisterAsSQLITE3()
	}

	ctx := context.Background()

	dbdir := t.TempDir()
	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3")

	url := fmt.Sprintf("file:%s?_pragma=journal_mode(%s)&_pragma=timeout(%d)&_pragma=foreign_keys(%s)&_pragma=busy_timeout(%d)", dbfile1, "DELETE", 1000, "true", 1000)

	xdb := tst.Must(sqlx.Open("sqlite", url))(t)

	db := NewDB(xdb)
	db.RegisterDefaultConverter()

	_, err := db.Exec(ctx, `
		CREATE TABLE requests (
			id          TEXT     NOT NULL,
			timestamp   INTEGER  NOT NULL,
			str_val     TEXT     NOT NULL,
			float_val   REAL     NOT NULL,
			dummy_bool  INTEGER  NOT NULL  CHECK(dummy_bool IN (0, 1)),
			json_val    TEXT     NOT NULL,
			PRIMARY KEY (id)
		) STRICT
	`, PP{})
	tst.AssertNoErr(t, err)

	_, err = InsertSingle(ctx, db, "requests", request{
		ID:        "9927",
		Timestamp: 12321,
		StrVal:    "hello world",
		Dummy:     true,
		FloatVal:  3.14159,
		JsonVal: JsonObj{
			"first":  1,
			"second": true,
		},
	})
	tst.AssertNoErr(t, err)

	v, err := QuerySingle[request](ctx, db, "SELECT * FROM requests WHERE id = '9927' LIMIT 1", PP{}, SModeExtended, Safe)
	tst.AssertNoErr(t, err)

	tst.AssertEqual(t, v.Timestamp, 12321)
	tst.AssertEqual(t, v.StrVal, "hello world")
	tst.AssertEqual(t, v.Dummy, true)
	tst.AssertEqual(t, v.FloatVal, 3.14159)
	tst.AssertStrRepEqual(t, v.JsonVal["first"], 1)
	tst.AssertStrRepEqual(t, v.JsonVal["second"], true)

	_, err = UpdateSingle(ctx, db, "requests", request{
		ID:        "9927",
		Timestamp: 9999,
		StrVal:    "9999 hello world",
		Dummy:     false,
		FloatVal:  123.222,
		JsonVal: JsonObj{
			"first":  2,
			"second": false,
		},
	}, "id")

	v, err = QuerySingle[request](ctx, db, "SELECT * FROM requests WHERE id = '9927' LIMIT 1", PP{}, SModeExtended, Safe)
	tst.AssertNoErr(t, err)

	tst.AssertEqual(t, v.Timestamp, 9999)
	tst.AssertEqual(t, v.StrVal, "9999 hello world")
	tst.AssertEqual(t, v.Dummy, false)
	tst.AssertEqual(t, v.FloatVal, 123.222)
	tst.AssertStrRepEqual(t, v.JsonVal["first"], 2)
	tst.AssertStrRepEqual(t, v.JsonVal["second"], false)
}

func TestInsertMultiple(t *testing.T) {

	type request struct {
		ID        string  `json:"id" db:"id"`
		Timestamp int     `json:"timestamp" db:"timestamp"`
		StrVal    string  `json:"strVal" db:"str_val"`
		FloatVal  float64 `json:"floatVal" db:"float_val"`
		Dummy     bool    `json:"dummyBool" db:"dummy_bool"`
		JsonVal   JsonObj `json:"jsonVal" db:"json_val"`
	}

	if !langext.InArray("sqlite3", sql.Drivers()) {
		sqlite.RegisterAsSQLITE3()
	}

	ctx := context.Background()

	dbdir := t.TempDir()
	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3")

	url := fmt.Sprintf("file:%s?_pragma=journal_mode(%s)&_pragma=timeout(%d)&_pragma=foreign_keys(%s)&_pragma=busy_timeout(%d)", dbfile1, "DELETE", 1000, "true", 1000)

	xdb := tst.Must(sqlx.Open("sqlite", url))(t)

	db := NewDB(xdb)
	db.RegisterDefaultConverter()

	_, err := db.Exec(ctx, `
		CREATE TABLE requests (
			id          TEXT     NOT NULL,
			timestamp   INTEGER  NOT NULL,
			str_val     TEXT     NOT NULL,
			float_val   REAL     NOT NULL,
			dummy_bool  INTEGER  NOT NULL  CHECK(dummy_bool IN (0, 1)),
			json_val    TEXT     NOT NULL,
			PRIMARY KEY (id)
		) STRICT
	`, PP{})
	tst.AssertNoErr(t, err)

	_, err = InsertMultiple(ctx, db, "requests", []request{
		{
			ID:        "1",
			Timestamp: 1000,
			StrVal:    "one",
			Dummy:     true,
			FloatVal:  0.1,
			JsonVal: JsonObj{
				"arr": []int{0},
			},
		},
		{
			ID:        "2",
			Timestamp: 2000,
			StrVal:    "two",
			Dummy:     true,
			FloatVal:  0.2,
			JsonVal: JsonObj{
				"arr": []int{0, 0},
			},
		},
		{
			ID:        "3",
			Timestamp: 3000,
			StrVal:    "three",
			Dummy:     true,
			FloatVal:  0.3,
			JsonVal: JsonObj{
				"arr": []int{0, 0, 0},
			},
		},
	}, -1)
	tst.AssertNoErr(t, err)

	_, err = QuerySingle[request](ctx, db, "SELECT * FROM requests WHERE id = '1' LIMIT 1", PP{}, SModeExtended, Safe)
	tst.AssertNoErr(t, err)

	_, err = QuerySingle[request](ctx, db, "SELECT * FROM requests WHERE id = '2' LIMIT 1", PP{}, SModeExtended, Safe)
	tst.AssertNoErr(t, err)

	_, err = QuerySingle[request](ctx, db, "SELECT * FROM requests WHERE id = '3' LIMIT 1", PP{}, SModeExtended, Safe)
	tst.AssertNoErr(t, err)
}
@@ -2,6 +2,7 @@ package tst

 import (
 	"encoding/hex"
+	"fmt"
 	"reflect"
 	"runtime/debug"
 	"testing"
@@ -125,3 +126,17 @@ func AssertNoErr(t *testing.T, anerr error) {
 		t.Error("Function returned an error: " + anerr.Error() + "\n" + string(debug.Stack()))
 	}
 }
+
+func AssertStrRepEqual(t *testing.T, actual any, expected any) {
+	t.Helper()
+	if fmt.Sprintf("%v", actual) != fmt.Sprintf("%v", expected) {
+		t.Errorf("values differ: Actual: '%v', Expected: '%v'", actual, expected)
+	}
+}
+
+func AssertStrRepNotEqual(t *testing.T, actual any, expected any) {
+	t.Helper()
+	if fmt.Sprintf("%v", actual) == fmt.Sprintf("%v", expected) {
+		t.Errorf("values do not differ: Actual: '%v', Expected: '%v'", actual, expected)
+	}
+}
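These helpers compare the fmt "%v" string representation instead of the values themselves, which is why the scanner tests above can compare a JSON-decoded number against a plain int. A tiny sketch (test name and values are made up):

package tst_test // hypothetical test package, for illustration only

import (
	"testing"

	"gogs.mikescher.com/BlackForestBytes/goext/tst"
)

func TestStrRepAssertions(t *testing.T) {
	tst.AssertStrRepEqual(t, int64(1), 1) // "1" == "1", passes even though the types differ
	tst.AssertStrRepNotEqual(t, "1.0", 1) // "1.0" != "1"
}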