Compare commits
23 Commits
SHA1
f610a2202c
2807299d46
e872dbccec
9daf71e2ed
fe278f7772
8ebda6fb3a
b0d3ce8c1c
021465e524
cf9c73aa4a
0652bf22dc
b196adffc7
717065e62d
e7b2b040b2
05d0f9e469
ccd03e50c8
1c77c2b8e8
9f6f967299
18c83f0f76
a64f336e24
14bbd205f8
cecfb0d788
a445e6f623
0aa6310971
Makefile (4 changed lines)

@@ -3,7 +3,9 @@ run:
	echo "This is a library - can't be run" && false

test:
	go test ./...
	# go test ./...
	which gotestsum || go install gotest.tools/gotestsum@latest
	gotestsum --format "testname" -- -tags="timetzdata sqlite_fts5 sqlite_foreign_keys" "./test"

version:
	_data/version.sh
@@ -8,6 +8,7 @@ import (
	"os"
	"reflect"
	"strconv"
	"strings"
	"time"
)

@@ -172,6 +173,20 @@ func parseEnvToValue(envval string, fullEnvKey string, rvtype reflect.Type) (ref

		return envcvl, nil

	} else if rvtype.ConvertibleTo(reflect.TypeOf(false)) {

		if strings.TrimSpace(strings.ToLower(envval)) == "true" {
			return reflect.ValueOf(true).Convert(rvtype), nil
		} else if strings.TrimSpace(strings.ToLower(envval)) == "false" {
			return reflect.ValueOf(false).Convert(rvtype), nil
		} else if strings.TrimSpace(strings.ToLower(envval)) == "1" {
			return reflect.ValueOf(true).Convert(rvtype), nil
		} else if strings.TrimSpace(strings.ToLower(envval)) == "0" {
			return reflect.ValueOf(false).Convert(rvtype), nil
		} else {
			return reflect.Value{}, errors.New(fmt.Sprintf("Failed to parse env-config variable '%s' to <%s, bool> (value := '%s')", fullEnvKey, rvtype.Name(), envval))
		}

	} else if rvtype.ConvertibleTo(reflect.TypeOf("")) {

		envcvl := reflect.ValueOf(envval).Convert(rvtype)
@@ -68,6 +68,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
		V7 aliasstring   `env:"TEST_V7"`
		V8 time.Duration `env:"TEST_V8"`
		V9 time.Time     `env:"TEST_V9"`
		VA bool          `env:"TEST_VA"`
	}

	data := testdata{

@@ -82,6 +83,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
		V7: "7",
		V8: 9,
		V9: time.Unix(1671102873, 0),
		VA: false,
	}

	t.Setenv("TEST_V1", "846")

@@ -93,6 +95,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
	t.Setenv("TEST_V7", "AAAAAA")
	t.Setenv("TEST_V8", "1min4s")
	t.Setenv("TEST_V9", "2009-11-10T23:00:00Z")
	t.Setenv("TEST_VA", "true")

	err := ApplyEnvOverrides("", &data, ".")
	if err != nil {

@@ -109,6 +112,7 @@ func TestApplyEnvOverridesSimple(t *testing.T) {
	tst.AssertEqual(t, data.V7, "AAAAAA")
	tst.AssertEqual(t, data.V8, time.Second*64)
	tst.AssertEqual(t, data.V9, time.Unix(1257894000, 0).UTC())
	tst.AssertEqual(t, data.VA, true)
}

func TestApplyEnvOverridesRecursive(t *testing.T) {
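For context, a minimal usage sketch of the new bool override outside the test suite; the package name confext and the import path are assumptions, only the `env` tag and the shape of the ApplyEnvOverrides call are taken from the diff above:

	package main

	import (
		"fmt"
		"os"

		"gogs.mikescher.com/BlackForestBytes/goext/confext" // assumed import path
	)

	type config struct {
		Verbose bool `env:"MYAPP_VERBOSE"` // illustrative variable name
	}

	func main() {
		_ = os.Setenv("MYAPP_VERBOSE", "true")

		cfg := config{}
		if err := confext.ApplyEnvOverrides("", &cfg, "."); err != nil {
			panic(err)
		}
		fmt.Println(cfg.Verbose) // true
	}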
@@ -167,15 +167,30 @@ func Marshal(v any) ([]byte, error) {
	return buf, nil
}

type IndentOpt struct {
	Prefix string
	Indent string
}

// MarshalSafeCollections is like Marshal except it will marshal nil maps and
// slices as '{}' and '[]' respectively instead of 'null'
func MarshalSafeCollections(v interface{}, nilSafeSlices bool, nilSafeMaps bool) ([]byte, error) {
func MarshalSafeCollections(v interface{}, nilSafeSlices bool, nilSafeMaps bool, indent *IndentOpt) ([]byte, error) {
	e := &encodeState{}
	err := e.marshal(v, encOpts{escapeHTML: true, nilSafeSlices: nilSafeSlices, nilSafeMaps: nilSafeMaps})
	if err != nil {
		return nil, err
	}
	b := e.Bytes()
	if indent != nil {
		var buf bytes.Buffer
		err = Indent(&buf, b, indent.Prefix, indent.Indent)
		if err != nil {
			return nil, err
		}
		return buf.Bytes(), nil
	} else {
		return e.Bytes(), nil
	}
}

// MarshalIndent is like Marshal but applies Indent to format the output.
@@ -1274,7 +1274,7 @@ func TestMarshalSafeCollections(t *testing.T) {
	}

	for i, tt := range tests {
		b, err := MarshalSafeCollections(tt.in, true, true)
		b, err := MarshalSafeCollections(tt.in, true, true, nil)
		if err != nil {
			t.Errorf("test %d, unexpected failure: %v", i, err)
		}
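A short sketch of the new indent parameter from inside the gojson package; the payload struct is illustrative, only MarshalSafeCollections and IndentOpt come from the hunks above:

	type payload struct {
		Items []string `json:"items"`
	}

	// a nil slice is emitted as [] and the output is pretty-printed
	buf, err := MarshalSafeCollections(payload{Items: nil}, true, true, &IndentOpt{Prefix: "", Indent: "  "})
	if err != nil {
		panic(err)
	}
	_ = buf // {\n  "items": []\n}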
gojson/gionic.go (new file, 44 lines)

@@ -0,0 +1,44 @@
package json

import (
	"net/http"
)

// Render interface is copied from github.com/gin-gonic/gin@v1.8.1/render/render.go
type Render interface {
	// Render writes data with custom ContentType.
	Render(http.ResponseWriter) error
	// WriteContentType writes custom ContentType.
	WriteContentType(w http.ResponseWriter)
}

type GoJsonRender struct {
	Data          any
	NilSafeSlices bool
	NilSafeMaps   bool
	Indent        *IndentOpt
}

func (r GoJsonRender) Render(w http.ResponseWriter) error {
	header := w.Header()
	if val := header["Content-Type"]; len(val) == 0 {
		header["Content-Type"] = []string{"application/json; charset=utf-8"}
	}

	jsonBytes, err := MarshalSafeCollections(r.Data, r.NilSafeSlices, r.NilSafeMaps, r.Indent)
	if err != nil {
		panic(err)
	}
	_, err = w.Write(jsonBytes)
	if err != nil {
		panic(err)
	}
	return nil
}

func (r GoJsonRender) WriteContentType(w http.ResponseWriter) {
	header := w.Header()
	if val := header["Content-Type"]; len(val) == 0 {
		header["Content-Type"] = []string{"application/json; charset=utf-8"}
	}
}
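A hedged sketch of wiring GoJsonRender into a gin handler; the gin import and the goext import path are assumptions, the renderer fields are taken from the file above:

	import (
		"net/http"

		"github.com/gin-gonic/gin"
		json "gogs.mikescher.com/BlackForestBytes/goext/gojson" // assumed import path
	)

	func listUsers(c *gin.Context) {
		var users []string // nil on purpose; rendered as [] instead of null
		c.Render(http.StatusOK, json.GoJsonRender{
			Data:          users,
			NilSafeSlices: true,
			NilSafeMaps:   true,
		})
	}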
@@ -433,3 +433,10 @@ func ArrConcat[T any](arr ...[]T) []T {
	}
	return r
}

// ArrCopy does a shallow copy of the 'in' array
func ArrCopy[T any](in []T) []T {
	out := make([]T, len(in))
	copy(out, in)
	return out
}
@@ -31,16 +31,16 @@ func CompareIntArr(arr1 []int, arr2 []int) bool {
	return false
}

func CompareArr[T OrderedConstraint](arr1 []T, arr2 []T) bool {
func CompareArr[T OrderedConstraint](arr1 []T, arr2 []T) int {

	for i := 0; i < len(arr1) || i < len(arr2); i++ {

		if i < len(arr1) && i < len(arr2) {

			if arr1[i] < arr2[i] {
				return true
				return -1
			} else if arr1[i] > arr2[i] {
				return false
				return +2
			} else {
				continue
			}

@@ -49,15 +49,55 @@ func CompareArr[T OrderedConstraint](arr1 []T, arr2 []T) bool {

		if i < len(arr1) {
			return true
			return +1
		} else { // if i < len(arr2)
			return false
			return -1
		}
	}

	return false
	return 0
}

func CompareString(a, b string) int {
	if a == b {
		return 0
	}
	if a < b {
		return -1
	}
	return +1
}

func CompareInt(a, b int) int {
	if a == b {
		return 0
	}
	if a < b {
		return -1
	}
	return +1
}

func CompareInt64(a, b int64) int {
	if a == b {
		return 0
	}
	if a < b {
		return -1
	}
	return +1
}

func Compare[T OrderedConstraint](a, b T) int {
	if a == b {
		return 0
	}
	if a < b {
		return -1
	}
	return +1
}
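A small sketch of how the new int result can drive sorting; the sort.Slice wiring is an assumption, CompareArr itself is from the hunk above:

	import "sort"

	func sortRows(rows [][]int) {
		sort.Slice(rows, func(i, j int) bool {
			return CompareArr(rows[i], rows[j]) < 0 // lexicographic order
		})
	}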
@@ -1,5 +1,10 @@
package langext

type MapEntry[T comparable, V any] struct {
	Key   T
	Value V
}

func MapKeyArr[T comparable, V any](v map[T]V) []T {
	result := make([]T, 0, len(v))
	for k := range v {

@@ -8,6 +13,14 @@ func MapKeyArr[T comparable, V any](v map[T]V) []T {
	return result
}

func MapValueArr[T comparable, V any](v map[T]V) []V {
	result := make([]V, 0, len(v))
	for _, mv := range v {
		result = append(result, mv)
	}
	return result
}

func ArrToMap[T comparable, V any](a []V, keyfunc func(V) T) map[T]V {
	result := make(map[T]V, len(a))
	for _, v := range a {

@@ -16,6 +29,17 @@ func ArrToMap[T comparable, V any](a []V, keyfunc func(V) T) map[T]V {
	return result
}

func MapToArr[T comparable, V any](v map[T]V) []MapEntry[T, V] {
	result := make([]MapEntry[T, V], 0, len(v))
	for mk, mv := range v {
		result = append(result, MapEntry[T, V]{
			Key:   mk,
			Value: mv,
		})
	}
	return result
}

func CopyMap[K comparable, V any](a map[K]V) map[K]V {
	result := make(map[K]V, len(a))
	for k, v := range a {

@@ -23,3 +47,11 @@ func CopyMap[K comparable, V any](a map[K]V) map[K]V {
	}
	return result
}

func ForceMap[K comparable, V any](v map[K]V) map[K]V {
	if v == nil {
		return make(map[K]V, 0)
	} else {
		return v
	}
}
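A brief sketch of the new map helpers; the map contents are illustrative:

	m := map[string]int{"a": 1, "b": 2}

	entries := MapToArr(m) // []MapEntry[string, int], iteration order is unspecified
	_ = entries

	var uninitialized map[string]int
	safe := ForceMap(uninitialized) // non-nil empty map, safe to write into
	safe["c"] = 3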
langext/panic.go (new file, 71 lines)

@@ -0,0 +1,71 @@
package langext

type PanicWrappedErr struct {
	panic any
}

func (p PanicWrappedErr) Error() string {
	return "A panic occurred"
}

func (p PanicWrappedErr) RecoveredObj() any {
	return p.panic
}

func RunPanicSafe(fn func()) (err error) {
	defer func() {
		if rec := recover(); rec != nil {
			err = PanicWrappedErr{panic: rec}
		}
	}()

	fn()

	return nil
}

func RunPanicSafeR1(fn func() error) (err error) {
	defer func() {
		if rec := recover(); rec != nil {
			err = PanicWrappedErr{panic: rec}
		}
	}()

	return fn()
}

func RunPanicSafeR2[T1 any](fn func() (T1, error)) (r1 T1, err error) {
	defer func() {
		if rec := recover(); rec != nil {
			r1 = *new(T1)
			err = PanicWrappedErr{panic: rec}
		}
	}()

	return fn()
}

func RunPanicSafeR3[T1 any, T2 any](fn func() (T1, T2, error)) (r1 T1, r2 T2, err error) {
	defer func() {
		if rec := recover(); rec != nil {
			r1 = *new(T1)
			r2 = *new(T2)
			err = PanicWrappedErr{panic: rec}
		}
	}()

	return fn()
}

func RunPanicSafeR4[T1 any, T2 any, T3 any](fn func() (T1, T2, T3, error)) (r1 T1, r2 T2, r3 T3, err error) {
	defer func() {
		if rec := recover(); rec != nil {
			r1 = *new(T1)
			r2 = *new(T2)
			r3 = *new(T3)
			err = PanicWrappedErr{panic: rec}
		}
	}()

	return fn()
}
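A hedged sketch of RunPanicSafeR2; strconv is only used as an illustrative body, the point is that any panic inside fn comes back as a PanicWrappedErr instead of crashing the caller:

	import "strconv"

	func parsePort(s string) (int, error) {
		return RunPanicSafeR2(func() (int, error) {
			// a panic raised anywhere in here is recovered and returned as error
			return strconv.Atoi(s)
		})
	}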
@@ -4,6 +4,12 @@ import (
	"reflect"
)

// PTrue := &true
var PTrue = Ptr(true)

// PFalse := &false
var PFalse = Ptr(false)

func Ptr[T any](v T) *T {
	return &v
}
langext/reflection.go (new file, 111 lines)

@@ -0,0 +1,111 @@
package langext

import (
	"reflect"
)

var reflectBasicTypes = []reflect.Type{
	reflect.Bool:       reflect.TypeOf(false),
	reflect.Int:        reflect.TypeOf(int(0)),
	reflect.Int8:       reflect.TypeOf(int8(0)),
	reflect.Int16:      reflect.TypeOf(int16(0)),
	reflect.Int32:      reflect.TypeOf(int32(0)),
	reflect.Int64:      reflect.TypeOf(int64(0)),
	reflect.Uint:       reflect.TypeOf(uint(0)),
	reflect.Uint8:      reflect.TypeOf(uint8(0)),
	reflect.Uint16:     reflect.TypeOf(uint16(0)),
	reflect.Uint32:     reflect.TypeOf(uint32(0)),
	reflect.Uint64:     reflect.TypeOf(uint64(0)),
	reflect.Uintptr:    reflect.TypeOf(uintptr(0)),
	reflect.Float32:    reflect.TypeOf(float32(0)),
	reflect.Float64:    reflect.TypeOf(float64(0)),
	reflect.Complex64:  reflect.TypeOf(complex64(0)),
	reflect.Complex128: reflect.TypeOf(complex128(0)),
	reflect.String:     reflect.TypeOf(""),
}

// Underlying returns the underlying type of t (without type alias)
//
// https://github.com/golang/go/issues/39574#issuecomment-655664772
func Underlying(t reflect.Type) (ret reflect.Type) {
	if t.Name() == "" {
		// t is an unnamed type. the underlying type is t itself
		return t
	}
	kind := t.Kind()
	if ret = reflectBasicTypes[kind]; ret != nil {
		return ret
	}
	switch kind {
	case reflect.Array:
		ret = reflect.ArrayOf(t.Len(), t.Elem())
	case reflect.Chan:
		ret = reflect.ChanOf(t.ChanDir(), t.Elem())
	case reflect.Map:
		ret = reflect.MapOf(t.Key(), t.Elem())
	case reflect.Func:
		nIn := t.NumIn()
		nOut := t.NumOut()
		in := make([]reflect.Type, nIn)
		out := make([]reflect.Type, nOut)
		for i := 0; i < nIn; i++ {
			in[i] = t.In(i)
		}
		for i := 0; i < nOut; i++ {
			out[i] = t.Out(i)
		}
		ret = reflect.FuncOf(in, out, t.IsVariadic())
	case reflect.Interface:
		// not supported
	case reflect.Ptr:
		ret = reflect.PtrTo(t.Elem())
	case reflect.Slice:
		ret = reflect.SliceOf(t.Elem())
	case reflect.Struct:
		// only partially supported: embedded fields
		// and unexported fields may cause panic in reflect.StructOf()
		defer func() {
			// if a panic happens, return t unmodified
			if recover() != nil && ret == nil {
				ret = t
			}
		}()
		n := t.NumField()
		fields := make([]reflect.StructField, n)
		for i := 0; i < n; i++ {
			fields[i] = t.Field(i)
		}
		ret = reflect.StructOf(fields)
	}
	return ret
}

// TryCast works similar to `v2, ok := v.(T)`
// Except it also works through type aliases
func TryCast[T any](v any) (T, bool) {

	underlying := Underlying(reflect.TypeOf(v))

	def := *new(T)

	if underlying != Underlying(reflect.TypeOf(def)) {
		return def, false
	}

	r1 := reflect.ValueOf(v)

	if !r1.CanConvert(underlying) {
		return def, false
	}

	r2 := r1.Convert(underlying)

	r3 := r2.Interface()

	r4, ok := r3.(T)
	if !ok {
		return def, false
	}

	return r4, true
}
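A sketch of TryCast crossing a named string type, which a plain type assertion would reject; the userID type is illustrative:

	type userID string

	func example() {
		var v any = userID("u-1234")

		s, ok := TryCast[string](v) // "u-1234", true
		_, plain := v.(string)      // plain assertion: false

		_ = s
		_ = ok
		_ = plain
	}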
@@ -41,6 +41,14 @@ func NewHexUUID() (string, error) {
	return string(dst), nil
}

func MustHexUUID() string {
	v, err := NewHexUUID()
	if err != nil {
		panic(err)
	}
	return v
}

func NewUpperHexUUID() (string, error) {
	uuid, err := NewUUID()
	if err != nil {

@@ -64,6 +72,14 @@ func NewUpperHexUUID() (string, error) {
	return strings.ToUpper(string(dst)), nil
}

func MustUpperHexUUID() string {
	v, err := NewUpperHexUUID()
	if err != nil {
		panic(err)
	}
	return v
}

func NewRawHexUUID() (string, error) {
	uuid, err := NewUUID()
	if err != nil {

@@ -83,6 +99,14 @@ func NewRawHexUUID() (string, error) {
	return strings.ToUpper(string(dst)), nil
}

func MustRawHexUUID() string {
	v, err := NewRawHexUUID()
	if err != nil {
		panic(err)
	}
	return v
}

func NewBracesUUID() (string, error) {
	uuid, err := NewUUID()
	if err != nil {

@@ -108,6 +132,14 @@ func NewBracesUUID() (string, error) {
	return strings.ToUpper(string(dst)), nil
}

func MustBracesUUID() string {
	v, err := NewBracesUUID()
	if err != nil {
		panic(err)
	}
	return v
}

func NewParensUUID() (string, error) {
	uuid, err := NewUUID()
	if err != nil {

@@ -132,3 +164,11 @@ func NewParensUUID() (string, error) {

	return strings.ToUpper(string(dst)), nil
}

func MustParensUUID() string {
	v, err := NewParensUUID()
	if err != nil {
		panic(err)
	}
	return v
}
@@ -22,6 +22,31 @@ func Max[T langext.OrderedConstraint](v1 T, v2 T) T {
	}
}

func Max3[T langext.OrderedConstraint](v1 T, v2 T, v3 T) T {
	result := v1
	if v2 > result {
		result = v2
	}
	if v3 > result {
		result = v3
	}
	return result
}

func Max4[T langext.OrderedConstraint](v1 T, v2 T, v3 T, v4 T) T {
	result := v1
	if v2 > result {
		result = v2
	}
	if v3 > result {
		result = v3
	}
	if v4 > result {
		result = v4
	}
	return result
}

func Min[T langext.OrderedConstraint](v1 T, v2 T) T {
	if v1 < v2 {
		return v1

@@ -30,6 +55,31 @@ func Min[T langext.OrderedConstraint](v1 T, v2 T) T {
	}
}

func Min3[T langext.OrderedConstraint](v1 T, v2 T, v3 T) T {
	result := v1
	if v2 < result {
		result = v2
	}
	if v3 < result {
		result = v3
	}
	return result
}

func Min4[T langext.OrderedConstraint](v1 T, v2 T, v3 T, v4 T) T {
	result := v1
	if v2 < result {
		result = v2
	}
	if v3 < result {
		result = v3
	}
	if v4 < result {
		result = v4
	}
	return result
}

func Abs[T langext.NumberConstraint](v T) T {
	if v < 0 {
		return -v
mongoext/pipeline.go (new file, 49 lines)

@@ -0,0 +1,49 @@
package mongoext

import (
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
)

// FixTextSearchPipeline moves {$match:{$text:{$search}}} entries to the front of the pipeline (otherwise it is a mongo error)
func FixTextSearchPipeline(pipeline mongo.Pipeline) mongo.Pipeline {

	dget := func(v bson.D, k string) (bson.M, bool) {
		for _, e := range v {
			if e.Key == k {
				if mv, ok := e.Value.(bson.M); ok {
					return mv, true
				}
			}
		}
		return nil, false
	}
	mget := func(v bson.M, k string) (bson.M, bool) {
		for ekey, eval := range v {
			if ekey == k {
				if mv, ok := eval.(bson.M); ok {
					return mv, true
				}
			}
		}
		return nil, false
	}

	result := make([]bson.D, 0, len(pipeline))

	for _, entry := range pipeline {

		if v0, ok := dget(entry, "$match"); ok {
			if v1, ok := mget(v0, "$text"); ok {
				if _, ok := v1["$search"]; ok {
					result = append([]bson.D{entry}, result...)
					continue
				}
			}
		}

		result = append(result, entry)
	}

	return result
}
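A sketch of the reordering behaviour, written against the same bson/mongo imports as the file above; the stages are illustrative:

	pipeline := mongo.Pipeline{
		{{Key: "$sort", Value: bson.M{"created": -1}}},
		{{Key: "$match", Value: bson.M{"$text": bson.M{"$search": "hello"}}}},
	}
	pipeline = FixTextSearchPipeline(pipeline)
	// the $match/$text stage is now pipeline[0], followed by the $sort stage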
mongoext/projections.go (new file, 30 lines)

@@ -0,0 +1,30 @@
package mongoext

import (
	"go.mongodb.org/mongo-driver/bson"
	"reflect"
	"strings"
)

// ProjectionFromStruct automatically generates a mongodb projection for a struct.
// This way you can pretty much always write
// `options.FindOne().SetProjection(mongoutils.ProjectionFromStruct(...your_model...))`
// to only get the data from mongodb that you will actually use in the later decode step
func ProjectionFromStruct(obj interface{}) bson.M {
	v := reflect.ValueOf(obj)
	t := v.Type()

	result := bson.M{}

	for i := 0; i < v.NumField(); i++ {
		tag := t.Field(i).Tag.Get("bson")
		if tag == "" {
			continue
		}
		tag = strings.Split(tag, ",")[0]

		result[tag] = 1
	}

	return result
}
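A usage sketch, assuming the standard mongo-driver options package; the model struct is illustrative:

	type userModel struct {
		ID       string `bson:"_id"`
		Username string `bson:"username"`
		Secret   string // no bson tag, therefore not part of the projection
	}

	proj := ProjectionFromStruct(userModel{}) // bson.M{"_id": 1, "username": 1}
	opts := options.FindOne().SetProjection(proj)
	_ = opts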
mongoext/registry.go (new file, 25 lines)

@@ -0,0 +1,25 @@
package mongoext

import (
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsoncodec"
	"gogs.mikescher.com/BlackForestBytes/goext/rfctime"
	"reflect"
)

func CreateGoExtBsonRegistry() *bsoncodec.Registry {
	rb := bsoncodec.NewRegistryBuilder()

	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.RFC3339Time{}), rfctime.RFC3339Time{})
	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.RFC3339Time{}), rfctime.RFC3339Time{})

	rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{})
	rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{})

	bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb)
	bsoncodec.DefaultValueDecoders{}.RegisterDefaultDecoders(rb)

	bson.PrimitiveCodecs{}.RegisterPrimitiveCodecs(rb)

	return rb.Build()
}
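A hedged wiring sketch; mongo.Connect, options.Client and SetRegistry are standard mongo-driver calls, the URI is a placeholder and the context/options imports are assumed:

	ctx := context.Background()

	client, err := mongo.Connect(ctx, options.Client().
		ApplyURI("mongodb://localhost:27017").
		SetRegistry(mongoext.CreateGoExtBsonRegistry()))
	if err != nil {
		panic(err)
	}
	_ = client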
@@ -5,7 +5,10 @@ import (
	"errors"
	"fmt"
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsoncodec"
	"go.mongodb.org/mongo-driver/bson/bsonrw"
	"go.mongodb.org/mongo-driver/bson/bsontype"
	"reflect"
	"time"
)

@@ -67,6 +70,10 @@ func (t *RFC3339Time) UnmarshalText(data []byte) error {

func (t *RFC3339Time) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
	if bt == bsontype.Null {
		// we can't set nil in UnmarshalBSONValue (so we use default(struct))
		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values
		// https://stackoverflow.com/questions/75167597
		// https://jira.mongodb.org/browse/GODRIVER-2252
		*t = RFC3339Time{}
		return nil
	}

@@ -86,6 +93,32 @@ func (t RFC3339Time) MarshalBSONValue() (bsontype.Type, []byte, error) {
	return bson.MarshalValue(time.Time(t))
}

func (t RFC3339Time) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
	if val.Kind() == reflect.Ptr && val.IsNil() {
		if !val.CanSet() {
			return errors.New("ValueUnmarshalerDecodeValue")
		}
		val.Set(reflect.New(val.Type().Elem()))
	}

	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
	if err != nil {
		return err
	}

	if val.Kind() == reflect.Ptr && len(src) == 0 {
		val.Set(reflect.Zero(val.Type()))
		return nil
	}

	err = t.UnmarshalBSONValue(tp, src)
	if err != nil {
		return err
	}

	return nil
}

func (t RFC3339Time) Serialize() string {
	return t.Time().Format(t.FormatStr())
}
@@ -5,7 +5,10 @@ import (
	"errors"
	"fmt"
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsoncodec"
	"go.mongodb.org/mongo-driver/bson/bsonrw"
	"go.mongodb.org/mongo-driver/bson/bsontype"
	"reflect"
	"time"
)

@@ -67,6 +70,10 @@ func (t *RFC3339NanoTime) UnmarshalText(data []byte) error {

func (t *RFC3339NanoTime) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
	if bt == bsontype.Null {
		// we can't set nil in UnmarshalBSONValue (so we use default(struct))
		// Use mongoext.CreateGoExtBsonRegistry if you need to unmarshal pointer values
		// https://stackoverflow.com/questions/75167597
		// https://jira.mongodb.org/browse/GODRIVER-2252
		*t = RFC3339NanoTime{}
		return nil
	}

@@ -86,6 +93,38 @@ func (t RFC3339NanoTime) MarshalBSONValue() (bsontype.Type, []byte, error) {
	return bson.MarshalValue(time.Time(t))
}

func (t RFC3339NanoTime) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
	if val.Kind() == reflect.Ptr && val.IsNil() {
		if !val.CanSet() {
			return errors.New("ValueUnmarshalerDecodeValue")
		}
		val.Set(reflect.New(val.Type().Elem()))
	}

	tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
	if err != nil {
		return err
	}

	if val.Kind() == reflect.Ptr && len(src) == 0 {
		val.Set(reflect.Zero(val.Type()))
		return nil
	}

	err = t.UnmarshalBSONValue(tp, src)
	if err != nil {
		return err
	}

	if val.Kind() == reflect.Ptr {
		val.Set(reflect.ValueOf(&t))
	} else {
		val.Set(reflect.ValueOf(t))
	}

	return nil
}

func (t RFC3339NanoTime) Serialize() string {
	return t.Time().Format(t.FormatStr())
}
sq/converter.go (new file, 91 lines)

@@ -0,0 +1,91 @@
package sq

import (
	"errors"
	"fmt"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"time"
)

// TODO UNFINISHED
// this is not finished;
// the idea was that we can register converters in the database struct,
// they get inherited by the transactions,
// and when unmarshaling (sq.Query | sq.QueryAll)
// or marshaling (sq.InsertSingle)
// the types get converted automatically...

type DBTypeConverter interface {
	ModelTypeString() string
	DBTypeString() string
	ModelToDB(v any) (any, error)
	DBToModel(v any) (any, error)
}

var ConverterBoolToBit = NewDBTypeConverter[bool, int](func(v bool) (int, error) {
	return langext.Conditional(v, 1, 0), nil
}, func(v int) (bool, error) {
	if v == 0 {
		return false, nil
	}
	if v == 1 {
		return true, nil
	}
	return false, errors.New(fmt.Sprintf("invalid value for boolean: '%d'", v))
})

var ConverterTimeToUnixMillis = NewDBTypeConverter[time.Time, int64](func(v time.Time) (int64, error) {
	return v.UnixMilli(), nil
}, func(v int64) (time.Time, error) {
	return time.UnixMilli(v), nil
})

var ConverterOptTimeToUnixMillis = NewDBTypeConverter[*time.Time, *int64](func(v *time.Time) (*int64, error) {
	if v == nil {
		return nil, nil
	}
	return langext.Ptr(v.UnixMilli()), nil
}, func(v *int64) (*time.Time, error) {
	if v == nil {
		return nil, nil
	}
	return langext.Ptr(time.UnixMilli(*v)), nil
})

type dbTypeConverterImpl[TModelData any, TDBData any] struct {
	dbTypeString    string
	modelTypeString string
	todb            func(v TModelData) (TDBData, error)
	tomodel         func(v TDBData) (TModelData, error)
}

func (t *dbTypeConverterImpl[TModelData, TDBData]) ModelTypeString() string {
	return t.modelTypeString
}

func (t *dbTypeConverterImpl[TModelData, TDBData]) DBTypeString() string {
	return t.dbTypeString
}

func (t *dbTypeConverterImpl[TModelData, TDBData]) ModelToDB(v any) (any, error) {
	if vv, ok := v.(TModelData); ok {
		return t.todb(vv)
	}
	return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.modelTypeString, v))
}

func (t *dbTypeConverterImpl[TModelData, TDBData]) DBToModel(v any) (any, error) {
	if vv, ok := v.(TDBData); ok {
		return t.tomodel(vv)
	}
	return nil, errors.New(fmt.Sprintf("Unexpected value in DBTypeConverter, expected '%s', found '%T'", t.dbTypeString, v))
}

func NewDBTypeConverter[TModelData any, TDBData any](todb func(v TModelData) (TDBData, error), tomodel func(v TDBData) (TModelData, error)) DBTypeConverter {
	return &dbTypeConverterImpl[TModelData, TDBData]{
		dbTypeString:    fmt.Sprintf("%T", *new(TDBData)),
		modelTypeString: fmt.Sprintf("%T", *new(TModelData)),
		todb:            todb,
		tomodel:         tomodel,
	}
}
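Since registration on the database struct is still marked as unfinished above, a converter can only be invoked directly for now; a minimal sketch:

	dbval, err := ConverterBoolToBit.ModelToDB(true) // 1, nil
	if err != nil {
		panic(err)
	}

	modelval, err := ConverterBoolToBit.DBToModel(dbval) // true, nil
	if err != nil {
		panic(err)
	}
	_ = modelval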
sq/hasher.go (new file, 199 lines)

@@ -0,0 +1,199 @@
package sq

import (
	"context"
	"crypto/sha256"
	"encoding/hex"
	"encoding/json"
	"fmt"
	"github.com/jmoiron/sqlx"
	"gogs.mikescher.com/BlackForestBytes/goext/langext"
	"os"
	"path/filepath"
	"strings"
)

func HashSqliteSchema(ctx context.Context, schemaStr string) (string, error) {
	dbdir := os.TempDir()
	dbfile1 := filepath.Join(dbdir, langext.MustHexUUID()+".sqlite3")

	err := os.MkdirAll(dbdir, os.ModePerm)
	if err != nil {
		return "", err
	}

	url := fmt.Sprintf("file:%s?_journal=%s&_timeout=%d&_fk=%s&_busy_timeout=%d", dbfile1, "DELETE", 1000, "true", 1000)

	xdb, err := sqlx.Open("sqlite3", url)
	if err != nil {
		return "", err
	}

	db := NewDB(xdb)

	_, err = db.Exec(ctx, schemaStr, PP{})
	if err != nil {
		return "", err
	}

	return HashSqliteDatabase(ctx, db)
}

func HashSqliteDatabase(ctx context.Context, db DB) (string, error) {
	ss, err := CreateSqliteDatabaseSchemaString(ctx, db)
	if err != nil {
		return "", err
	}

	cs := sha256.Sum256([]byte(ss))

	return hex.EncodeToString(cs[:]), nil
}

func CreateSqliteDatabaseSchemaString(ctx context.Context, db DB) (string, error) {

	type colInfo struct {
		Name       string  `db:"name"`
		Type       string  `db:"type"`
		NotNull    string  `db:"notnull"`
		Default    *string `db:"dflt_value"`
		PrimaryKey *string `db:"pk"`
	}

	type idxInfo struct {
		Name    string `json:"name" db:"name"`
		Unique  int    `json:"unique" db:"unique"`
		Origin  string `json:"origin" db:"origin"`
		Partial int    `json:"partial" db:"partial"`
	}

	type fkyInfo struct {
		TableDest string `json:"table_dest" db:"table"`
		From      string `json:"from" db:"from"`
		To        string `json:"to" db:"to"`
		OnUpdate  string `json:"on_update" db:"on_update"`
		OnDelete  string `json:"on_delete" db:"on_delete"`
		Match     string `json:"match" db:"match"`
	}

	type tabInfo struct {
		Name   string `json:"name" db:"name"`
		Type   string `json:"type" db:"type"`
		NumCol int    `json:"ncol" db:"ncol"`
		Strict int    `json:"strict" db:"strict"`

		ColumnInfo []colInfo `json:"-"`
		IndexInfo  []idxInfo `json:"-"`
		FKeyInfo   []fkyInfo `json:"-"`
	}

	rowsTableList, err := db.Query(ctx, "PRAGMA table_list;", PP{})
	if err != nil {
		return "", err
	}
	tableList, err := ScanAll[tabInfo](rowsTableList, SModeFast, Unsafe, true)
	if err != nil {
		return "", err
	}

	langext.SortBy(tableList, func(v tabInfo) string { return v.Name })

	result := make([]tabInfo, 0)

	for i, tab := range tableList {

		if strings.HasPrefix(tab.Name, "sqlite_") {
			continue
		}

		{
			rowsColumnList, err := db.Query(ctx, fmt.Sprintf("PRAGMA table_info(\"%s\");", tab.Name), PP{})
			if err != nil {
				return "", err
			}

			columnList, err := ScanAll[colInfo](rowsColumnList, SModeFast, Unsafe, true)
			if err != nil {
				return "", err
			}

			langext.SortBy(columnList, func(v colInfo) string { return v.Name })

			tableList[i].ColumnInfo = columnList
		}

		{
			rowsIdxList, err := db.Query(ctx, fmt.Sprintf("PRAGMA index_list(\"%s\");", tab.Name), PP{})
			if err != nil {
				return "", err
			}
			idxList, err := ScanAll[idxInfo](rowsIdxList, SModeFast, Unsafe, true)
			if err != nil {
				return "", err
			}

			langext.SortBy(idxList, func(v idxInfo) string { return v.Name })

			tableList[i].IndexInfo = idxList
		}

		{
			rowsIdxList, err := db.Query(ctx, fmt.Sprintf("PRAGMA foreign_key_list(\"%s\");", tab.Name), PP{})
			if err != nil {
				return "", err
			}
			fkyList, err := ScanAll[fkyInfo](rowsIdxList, SModeFast, Unsafe, true)
			if err != nil {
				return "", err
			}

			langext.SortBy(fkyList, func(v fkyInfo) string { return v.From })

			tableList[i].FKeyInfo = fkyList
		}

		result = append(result, tableList[i])
	}

	strBuilderResult := ""
	for _, vTab := range result {
		jbinTable, err := json.Marshal(vTab)
		if err != nil {
			return "", err
		}

		strBuilderResult += fmt.Sprintf("#TABLE: %s\n{\n", string(jbinTable))

		for _, vCol := range vTab.ColumnInfo {
			jbinColumn, err := json.Marshal(vCol)
			if err != nil {
				return "", err
			}

			strBuilderResult += fmt.Sprintf(" COLUMN: %s\n", string(jbinColumn))
		}

		for _, vIdx := range vTab.IndexInfo {
			jbinIndex, err := json.Marshal(vIdx)
			if err != nil {
				return "", err
			}

			strBuilderResult += fmt.Sprintf(" INDEX: %s\n", string(jbinIndex))
		}

		for _, vFky := range vTab.FKeyInfo {
			jbinFKey, err := json.Marshal(vFky)
			if err != nil {
				return "", err
			}

			strBuilderResult += fmt.Sprintf(" FKEY: %s\n", string(jbinFKey))
		}

		strBuilderResult += "}\n\n"
	}

	return strBuilderResult, nil
}
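A usage sketch for schema drift detection; the schema string is illustrative:

	schema := `CREATE TABLE users (id TEXT NOT NULL PRIMARY KEY, name TEXT NOT NULL);`

	hash, err := HashSqliteSchema(context.Background(), schema)
	if err != nil {
		panic(err)
	}
	// hash is the hex-encoded sha256 of the normalized schema description and can be
	// compared against the result of HashSqliteDatabase on a live database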
@@ -1,9 +1,13 @@
package sq

import (
	"context"
	"database/sql"
	"errors"
	"fmt"
	"github.com/jmoiron/sqlx"
	"reflect"
	"strings"
)

type StructScanMode string

@@ -16,10 +20,79 @@ const (

type StructScanSafety string

const (
	Safe   StructScanSafety = "SAFE"
	Unsafe StructScanSafety = "UNSAFE"
	Safe   StructScanSafety = "SAFE"   // return error for missing fields
	Unsafe StructScanSafety = "UNSAFE" // ignore missing fields
)

func InsertSingle[TData any](ctx context.Context, q Queryable, tableName string, v TData) (sql.Result, error) {

	rval := reflect.ValueOf(v)
	rtyp := rval.Type()

	columns := make([]string, 0)
	params := make([]string, 0)
	pp := PP{}

	for i := 0; i < rtyp.NumField(); i++ {

		rsfield := rtyp.Field(i)
		rvfield := rval.Field(i)

		if !rsfield.IsExported() {
			continue
		}

		columnName := rsfield.Tag.Get("db")
		if columnName == "" || columnName == "-" {
			continue
		}

		paramkey := fmt.Sprintf("_%s", columnName)

		columns = append(columns, "\""+columnName+"\"")
		params = append(params, ":"+paramkey)
		pp[paramkey] = rvfield.Interface()

	}

	sqlstr := fmt.Sprintf("INSERT"+" INTO \"%s\" (%s) VALUES (%s)", tableName, strings.Join(columns, ", "), strings.Join(params, ", "))

	sqlr, err := q.Exec(ctx, sqlstr, pp)
	if err != nil {
		return nil, err
	}

	return sqlr, nil
}

func QuerySingle[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) (TData, error) {
	rows, err := q.Query(ctx, sql, pp)
	if err != nil {
		return *new(TData), err
	}

	data, err := ScanSingle[TData](rows, mode, sec, true)
	if err != nil {
		return *new(TData), err
	}

	return data, nil
}

func QueryAll[TData any](ctx context.Context, q Queryable, sql string, pp PP, mode StructScanMode, sec StructScanSafety) ([]TData, error) {
	rows, err := q.Query(ctx, sql, pp)
	if err != nil {
		return nil, err
	}

	data, err := ScanAll[TData](rows, mode, sec, true)
	if err != nil {
		return nil, err
	}

	return data, nil
}

func ScanSingle[TData any](rows *sqlx.Rows, mode StructScanMode, sec StructScanSafety, close bool) (TData, error) {
	if rows.Next() {
		var strscan *StructScanner