Compare commits

34 Commits:

710c257c64, c320bb3d90, 2f01a1d50f, ffc57b7e89, d88cd3c22b, ac5ad640bd, 21d241f9b1, 2569c165f8, ee262a94fb, 7977c0e59c, ceff0161c6, a30da61419, b613b122e3, d017530444, 8de83cc290, 603ec82b83, 93c4cf31a8, dc2d8a9103, 6589e8d5cd, 0006c6859d, 827b3fc1b7, f7dce4a102, 45d4fd7101, c7df9d2264, d0954bf133, 8affa81bb9, fe9ebf0bab, a4b5f33d15, e89e2c18f2, b16d5152c7, 5fb2f8a312, 2ad820be8d, 555096102a, d76d7b5cb9
.idea/goext.iml (generated, 6 changes)

@@ -1,6 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="WEB_MODULE" version="4">
<component name="Go" enabled="true" />
<component name="Go" enabled="true">
<buildTags>
<option name="goVersion" value="1.19" />
</buildTags>
</component>
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
README.md (57 changes)

@@ -10,31 +10,32 @@ Potentially needs `export GOPRIVATE="gogs.mikescher.com"`

### Packages:

| Name | Maintainer | Description |
|-------------|------------|---------------------------------------------------------------------------------------------------------------|
| langext | Mike | General uttility/helper functions, (everything thats missing from go standard library) |
| mathext | Mike | Utility/Helper functions for math |
| cryptext | Mike | Utility/Helper functions for encryption |
| syncext | Mike | Utility/Helper funtions for multi-threading / mutex / channels |
| dataext | Mike | Various useful data structures |
| zipext | Mike | Utility for zip/gzip/tar etc |
| | | |
| mongoext | Mike | Utility/Helper functions for mongodb |
| cursortoken | Mike | MongoDB cursortoken implementation |
| | | |
| totpext | Mike | Implementation of TOTP (2-Factor-Auth) |
| termext | Mike | Utilities for terminals (mostly color output) |
| confext | Mike | Parses environment configuration into structs |
| cmdext | Mike | Runner for external commands/processes |
| | | |
| sq | Mike | Utility functions for sql based databases |
| tst | Mike | Utility functions for unit tests |
| | | |
| rfctime | Mike | Classes for time seriallization, with different marshallign method for mongo and json |
| gojson | Mike | Same interface for marshalling/unmarshalling as go/json, except with proper serialization of null arrays/maps |
| | | |
| bfcodegen | Mike | Various codegen tools (run via go generate) |
| | | |
| rext | Mike | Regex Wrapper, wraps regexp with a better interface |
| wmo | Mike | Mongo Wrapper, wraps mongodb with a better interface |
| | | |

| Name | Maintainer | Description |
|--------------|------------|---------------------------------------------------------------------------------------------------------------|
| langext | Mike | General uttility/helper functions, (everything thats missing from go standard library) |
| mathext | Mike | Utility/Helper functions for math |
| cryptext | Mike | Utility/Helper functions for encryption |
| syncext | Mike | Utility/Helper funtions for multi-threading / mutex / channels |
| dataext | Mike | Various useful data structures |
| zipext | Mike | Utility for zip/gzip/tar etc |
| reflectext | Mike | Utility for golagn reflection |
| | | |
| mongoext | Mike | Utility/Helper functions for mongodb |
| cursortoken | Mike | MongoDB cursortoken implementation |
| | | |
| totpext | Mike | Implementation of TOTP (2-Factor-Auth) |
| termext | Mike | Utilities for terminals (mostly color output) |
| confext | Mike | Parses environment configuration into structs |
| cmdext | Mike | Runner for external commands/processes |
| | | |
| sq | Mike | Utility functions for sql based databases |
| tst | Mike | Utility functions for unit tests |
| | | |
| rfctime | Mike | Classes for time seriallization, with different marshallign method for mongo and json |
| gojson | Mike | Same interface for marshalling/unmarshalling as go/json, except with proper serialization of null arrays/maps |
| | | |
| bfcodegen | Mike | Various codegen tools (run via go generate) |
| | | |
| rext | Mike | Regex Wrapper, wraps regexp with a better interface |
| wmo | Mike | Mongo Wrapper, wraps mongodb with a better interface |
| | | |
@@ -23,6 +23,8 @@ fi

git pull --ff

go get -u ./...

curr_vers=$(git describe --tags --abbrev=0 | sed 's/v//g')

next_ver=$(echo "$curr_vers" | awk -F. -v OFS=. 'NF==1{print ++$NF}; NF>1{if(length($NF+1)>length($NF))$(NF-1)++; $NF=sprintf("%0*d", length($NF), ($NF+1)%(10^length($NF))); print}')

@@ -32,6 +34,8 @@ echo "> Current Version: ${curr_vers}"

echo "> Next Version: ${next_ver}"
echo ""

printf "package goext\n\nconst GoextVersion = \"%s\"\n\nconst GoextVersionTimestamp = \"%s\"\n" "${next_ver}" "$( date +"%Y-%m-%dT%H:%M:%S%z" )" > "goextVersion.go"

git add --verbose .

msg="v${next_ver}"
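The awk one-liner above is fairly dense. As an informal illustration only (the shell script itself is authoritative), this Go sketch mirrors its multi-component behaviour: increment the last dotted component, keep its zero-padding, and carry into the previous component when the incremented value would gain a digit, so "0.0.170" becomes "0.0.171" and "1.9" becomes "2.0".

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// nextVersion mirrors the awk expression for versions with 2+ components
// (the script handles single-component versions with a plain increment).
func nextVersion(cur string) string {
	parts := strings.Split(cur, ".")
	last := parts[len(parts)-1]
	n, _ := strconv.Atoi(last)
	if len(strconv.Itoa(n+1)) > len(last) {
		// incremented value gains a digit -> bump the previous component
		prev, _ := strconv.Atoi(parts[len(parts)-2])
		parts[len(parts)-2] = strconv.Itoa(prev + 1)
	}
	limit := 1
	for i := 0; i < len(last); i++ {
		limit *= 10
	}
	parts[len(parts)-1] = fmt.Sprintf("%0*d", len(last), (n+1)%limit)
	return strings.Join(parts, ".")
}

func main() {
	fmt.Println(nextVersion("0.0.170")) // 0.0.171
	fmt.Println(nextVersion("1.9"))     // 2.0
}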
@@ -3,11 +3,15 @@ package bfcodegen

import (
"errors"
"fmt"
"gogs.mikescher.com/BlackForestBytes/goext"
"gogs.mikescher.com/BlackForestBytes/goext/cmdext"
"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"io"
"os"
"path"
"path/filepath"
"regexp"
"strings"
"time"

@@ -21,6 +25,7 @@ type EnumDefVal struct {

type EnumDef struct {
File string
FileRelative string
EnumTypeName string
Type string
Values []EnumDefVal

@@ -32,6 +37,8 @@ var rexEnumDef = rext.W(regexp.MustCompile("^\\s*type\\s+(?P<name>[A-Za-z0-9_]+)

var rexValueDef = rext.W(regexp.MustCompile("^\\s*(?P<name>[A-Za-z0-9_]+)\\s+(?P<type>[A-Za-z0-9_]+)\\s*=\\s*(?P<value>(\"[A-Za-z0-9_:]+\"|[0-9]+))\\s*(//(?P<descr>.*))?.*$"))

var rexChecksumConst = rext.W(regexp.MustCompile("const ChecksumGenerator = \"(?P<cs>[A-Za-z0-9_]*)\""))

func GenerateEnumSpecs(sourceDir string, destFile string) error {

files, err := os.ReadDir(sourceDir)

@@ -39,17 +46,46 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
return err
}

oldChecksum := "N/A"
if _, err := os.Stat(destFile); !os.IsNotExist(err) {
content, err := os.ReadFile(destFile)
if err != nil {
return err
}
if m, ok := rexChecksumConst.MatchFirst(string(content)); ok {
oldChecksum = m.GroupByName("cs").Value()
}
}

files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
langext.SortBy(files, func(v os.DirEntry) string { return v.Name() })

newChecksumStr := goext.GoextVersion
for _, f := range files {
content, err := os.ReadFile(path.Join(sourceDir, f.Name()))
if err != nil {
return err
}
newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content)
}

newChecksum := cryptext.BytesSha256([]byte(newChecksumStr))

if newChecksum != oldChecksum {
fmt.Printf("[EnumGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum)
} else {
fmt.Printf("[EnumGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum)
return nil
}

allEnums := make([]EnumDef, 0)

pkgname := ""

for _, f := range files {
if !strings.HasSuffix(f.Name(), ".go") {
continue
}

fmt.Printf("========= %s =========\n\n", f.Name())
fileEnums, pn, err := processFile(f.Name())
fileEnums, pn, err := processFile(sourceDir, path.Join(sourceDir, f.Name()))
if err != nil {
return err
}

@@ -67,7 +103,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
return errors.New("no package name found in any file")
}

err = os.WriteFile(destFile, []byte(fmtOutput(allEnums, pkgname)), 0o755)
err = os.WriteFile(destFile, []byte(fmtOutput(newChecksum, allEnums, pkgname)), 0o755)
if err != nil {
return err
}

@@ -89,7 +125,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
return nil
}

func processFile(fn string) ([]EnumDef, string, error) {
func processFile(basedir string, fn string) ([]EnumDef, string, error) {
file, err := os.Open(fn)
if err != nil {
return nil, "", err

@@ -119,8 +155,15 @@ func processFile(fn string) ([]EnumDef, string, error) {
}

if match, ok := rexEnumDef.MatchFirst(line); ok {

rfp, err := filepath.Rel(basedir, fn)
if err != nil {
return nil, "", err
}

def := EnumDef{
File: fn,
FileRelative: rfp,
EnumTypeName: match.GroupByName("name").Value(),
Type: match.GroupByName("type").Value(),
Values: make([]EnumDefVal, 0),

@@ -159,8 +202,8 @@ func processFile(fn string) ([]EnumDef, string, error) {
return enums, pkgname, nil
}

func fmtOutput(enums []EnumDef, pkgname string) string {
str := "// Code generated by permissions_gen.sh DO NOT EDIT.\n"
func fmtOutput(cs string, enums []EnumDef, pkgname string) string {
str := "// Code generated by enum-generate.go DO NOT EDIT.\n"
str += "\n"
str += "package " + pkgname + "\n"
str += "\n"

@@ -168,6 +211,9 @@ func fmtOutput(enums []EnumDef, pkgname string) string {
str += "import \"gogs.mikescher.com/BlackForestBytes/goext/langext\"" + "\n"
str += "\n"

str += "const ChecksumGenerator = \"" + cs + "\"" + "\n"
str += "\n"

str += "type Enum interface {" + "\n"
str += " Valid() bool" + "\n"
str += " ValuesAny() []any" + "\n"

@@ -202,7 +248,7 @@ func fmtOutput(enums []EnumDef, pkgname string) string {

str += "// ================================ " + enumdef.EnumTypeName + " ================================" + "\n"
str += "//" + "\n"
str += "// File: " + enumdef.File + "\n"
str += "// File: " + enumdef.FileRelative + "\n"
str += "// StringEnum: " + langext.Conditional(hasStr, "true", "false") + "\n"
str += "// DescrEnum: " + langext.Conditional(hasDescr, "true", "false") + "\n"
str += "//" + "\n"
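For context, a minimal way to drive the generator (hedged sketch; the wrapper and paths are hypothetical, the diff itself only defines GenerateEnumSpecs): a small program or go:generate hook calls it with the package directory and the output file, and the ChecksumGenerator constant embedded in the output makes repeated runs no-ops while the sources are unchanged.

package main

import "gogs.mikescher.com/BlackForestBytes/goext/bfcodegen"

func main() {
	// Regenerates ./models/enums_gen.go only if the checksum over the
	// package's .go files differs from the one recorded in the output file.
	if err := bfcodegen.GenerateEnumSpecs("./models", "./models/enums_gen.go"); err != nil {
		panic(err)
	}
}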
bfcodegen/enum-generate_test.go (new file, 15 lines)

@@ -0,0 +1,15 @@
package bfcodegen

import (
"testing"
)

func TestApplyEnvOverridesSimple(t *testing.T) {

err := GenerateEnumSpecs("/home/mike/Code/reiff/badennet/bnet-backend/models", "/home/mike/Code/reiff/badennet/bnet-backend/models/enums_gen.go")
if err != nil {
t.Error(err)
t.Fail()
}

}
@@ -178,9 +178,9 @@ func parseEnvToValue(envval string, fullEnvKey string, rvtype reflect.Type) (ref

if strings.TrimSpace(strings.ToLower(envval)) == "true" {
return reflect.ValueOf(true).Convert(rvtype), nil
} else if strings.TrimSpace(strings.ToLower(envval)) == "false" {
return reflect.ValueOf(true).Convert(rvtype), nil
} else if strings.TrimSpace(strings.ToLower(envval)) == "1" {
return reflect.ValueOf(false).Convert(rvtype), nil
} else if strings.TrimSpace(strings.ToLower(envval)) == "1" {
return reflect.ValueOf(true).Convert(rvtype), nil
} else if strings.TrimSpace(strings.ToLower(envval)) == "0" {
return reflect.ValueOf(false).Convert(rvtype), nil
} else {
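This hunk straightens out the boolean branch of confext's env parsing. As an informal illustration only (not code from the repository), the accepted values and the booleans they should map onto after the fix:

package main

import (
	"fmt"
	"strings"
)

// parseBoolEnv is a stand-in for the corrected branch in parseEnvToValue:
// "true"/"1" -> true, "false"/"0" -> false, everything else is rejected.
func parseBoolEnv(envval string) (value bool, ok bool) {
	switch strings.TrimSpace(strings.ToLower(envval)) {
	case "true", "1":
		return true, true
	case "false", "0":
		return false, true
	default:
		return false, false
	}
}

func main() {
	fmt.Println(parseBoolEnv(" TRUE ")) // true true
	fmt.Println(parseBoolEnv("0"))      // false true
	fmt.Println(parseBoolEnv("yes"))    // false false
}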
@@ -3,6 +3,7 @@ package cryptext

import (
"crypto/rand"
"crypto/sha256"
"crypto/sha512"
"encoding/base64"
"encoding/hex"
"errors"

@@ -14,14 +15,15 @@ import (
"strings"
)

const LatestPassHashVersion = 4
const LatestPassHashVersion = 5

// PassHash
// - [v0]: plaintext password ( `0|...` )
// - [v1]: sha256(plaintext)
// - [v2]: seed | sha256<seed>(plaintext)
// - [v3]: seed | sha256<seed>(plaintext) | [hex(totp)]
// - [v4]: bcrypt(plaintext) | [hex(totp)]
// - [v0]: plaintext password ( `0|...` ) // simple, used to write PW's directly in DB
// - [v1]: sha256(plaintext) // simple hashing
// - [v2]: seed | sha256<seed>(plaintext) // add seed
// - [v3]: seed | sha256<seed>(plaintext) | [hex(totp)] // add TOTP support
// - [v4]: bcrypt(plaintext) | [hex(totp)] // use proper bcrypt
// - [v5]: bcrypt(sha512(plaintext)) | [hex(totp)] // hash pw before bcrypt (otherwise max pw-len = 72)
type PassHash string

func (ph PassHash) Valid() bool {

@@ -109,7 +111,21 @@ func (ph PassHash) Data() (_version int, _seed []byte, _payload []byte, _totp bo
totp := false
totpsecret := make([]byte, 0)
if split[2] != "0" {
totpsecret, err = hex.DecodeString(split[3])
totpsecret, err = hex.DecodeString(split[2])
totp = true
}
return int(version), nil, payload, totp, totpsecret, true
}

if version == 5 {
if len(split) != 3 {
return -1, nil, nil, false, nil, false
}
payload := []byte(split[1])
totp := false
totpsecret := make([]byte, 0)
if split[2] != "0" {
totpsecret, err = hex.DecodeString(split[2])
totp = true
}
return int(version), nil, payload, totp, totpsecret, true

@@ -156,6 +172,14 @@ func (ph PassHash) Verify(plainpass string, totp *string) bool {
}
}

if version == 5 {
if !hastotp {
return bcrypt.CompareHashAndPassword(payload, hash512(plainpass)) == nil
} else {
return bcrypt.CompareHashAndPassword(payload, hash512(plainpass)) == nil && totpext.Validate(totpsecret, *totp)
}
}

return false
}

@@ -209,6 +233,12 @@ func (ph PassHash) ClearTOTP() (PassHash, error) {
return PassHash(strings.Join(split, "|")), nil
}

if version == 5 {
split := strings.Split(string(ph), "|")
split[2] = "0"
return PassHash(strings.Join(split, "|")), nil
}

return "", errors.New("unknown version")
}

@@ -242,6 +272,12 @@ func (ph PassHash) WithTOTP(totpSecret []byte) (PassHash, error) {
return PassHash(strings.Join(split, "|")), nil
}

if version == 5 {
split := strings.Split(string(ph), "|")
split[2] = hex.EncodeToString(totpSecret)
return PassHash(strings.Join(split, "|")), nil
}

return "", errors.New("unknown version")
}

@@ -271,6 +307,10 @@ func (ph PassHash) Change(newPlainPass string) (PassHash, error) {
return HashPasswordV4(newPlainPass, langext.Conditional(hastotp, totpsecret, nil))
}

if version == 5 {
return HashPasswordV5(newPlainPass, langext.Conditional(hastotp, totpsecret, nil))
}

return "", errors.New("unknown version")
}

@@ -279,7 +319,24 @@ func (ph PassHash) String() string {
}

func HashPassword(plainpass string, totpSecret []byte) (PassHash, error) {
return HashPasswordV4(plainpass, totpSecret)
return HashPasswordV5(plainpass, totpSecret)
}

func HashPasswordV5(plainpass string, totpSecret []byte) (PassHash, error) {
var strtotp string

if totpSecret == nil {
strtotp = "0"
} else {
strtotp = hex.EncodeToString(totpSecret)
}

payload, err := bcrypt.GenerateFromPassword(hash512(plainpass), bcrypt.MinCost)
if err != nil {
return "", err
}

return PassHash(fmt.Sprintf("5|%s|%s", string(payload), strtotp)), nil
}

func HashPasswordV4(plainpass string, totpSecret []byte) (PassHash, error) {

@@ -340,6 +397,13 @@ func HashPasswordV0(plainpass string) (PassHash, error) {
return PassHash(fmt.Sprintf("0|%s", plainpass)), nil
}

func hash512(s string) []byte {
h := sha512.New()
h.Write([]byte(s))
bs := h.Sum(nil)
return bs
}

func hash256(s string) []byte {
h := sha256.New()
h.Write([]byte(s))
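A short usage sketch of the new default (my own example, not from the diff): HashPassword now produces a v5 hash, stored as "5|<bcrypt(sha512(plaintext))>|<hex(totpsecret) or 0>", and verification goes through the same PassHash value.

package main

import (
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/cryptext"
)

func main() {
	// nil TOTP secret -> the third segment of the stored hash is "0"
	ph, err := cryptext.HashPassword("hunter2", nil)
	if err != nil {
		panic(err)
	}
	fmt.Println(ph.Valid())                // true
	fmt.Println(ph.Verify("hunter2", nil)) // true
	fmt.Println(ph.Verify("wrong", nil))   // false
}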
cryptext/passHash_test.go (new file, 210 lines)

@@ -0,0 +1,210 @@
package cryptext

import (
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/totpext"
"gogs.mikescher.com/BlackForestBytes/goext/tst"
"testing"
)

func TestPassHash1(t *testing.T) {
ph, err := HashPassword("test123", nil)
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertFalse(t, ph.HasTOTP())
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())

tst.AssertTrue(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))
}

func TestPassHashTOTP(t *testing.T) {
sec, err := totpext.GenerateSecret()
tst.AssertNoErr(t, err)

ph, err := HashPassword("test123", sec)
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertTrue(t, ph.HasTOTP())
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())

tst.AssertFalse(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
tst.AssertFalse(t, ph.Verify("test124", nil))
}

func TestPassHashUpgrade_V0(t *testing.T) {
ph, err := HashPasswordV0("test123")
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertFalse(t, ph.HasTOTP())
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())

tst.AssertTrue(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))

ph, err = ph.Upgrade("test123")
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertFalse(t, ph.HasTOTP())
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())

tst.AssertTrue(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))

}

func TestPassHashUpgrade_V1(t *testing.T) {
ph, err := HashPasswordV1("test123")
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertFalse(t, ph.HasTOTP())
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())

tst.AssertTrue(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))

ph, err = ph.Upgrade("test123")
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertFalse(t, ph.HasTOTP())
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())

tst.AssertTrue(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))

}

func TestPassHashUpgrade_V2(t *testing.T) {
ph, err := HashPasswordV2("test123")
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertFalse(t, ph.HasTOTP())
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())

tst.AssertTrue(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))

ph, err = ph.Upgrade("test123")
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertFalse(t, ph.HasTOTP())
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())

tst.AssertTrue(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))

}

func TestPassHashUpgrade_V3(t *testing.T) {
ph, err := HashPasswordV3("test123", nil)
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertFalse(t, ph.HasTOTP())
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())

tst.AssertTrue(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))

ph, err = ph.Upgrade("test123")
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertFalse(t, ph.HasTOTP())
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())

tst.AssertTrue(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))

}

func TestPassHashUpgrade_V3_TOTP(t *testing.T) {
sec, err := totpext.GenerateSecret()
tst.AssertNoErr(t, err)

ph, err := HashPasswordV3("test123", sec)
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertTrue(t, ph.HasTOTP())
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())

tst.AssertFalse(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
tst.AssertFalse(t, ph.Verify("test124", nil))

ph, err = ph.Upgrade("test123")
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertTrue(t, ph.HasTOTP())
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())

tst.AssertFalse(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
tst.AssertFalse(t, ph.Verify("test124", nil))
}

func TestPassHashUpgrade_V4(t *testing.T) {
ph, err := HashPasswordV4("test123", nil)
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertFalse(t, ph.HasTOTP())
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())

tst.AssertTrue(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))

ph, err = ph.Upgrade("test123")
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertFalse(t, ph.HasTOTP())
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())

tst.AssertTrue(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))

}

func TestPassHashUpgrade_V4_TOTP(t *testing.T) {
sec, err := totpext.GenerateSecret()
tst.AssertNoErr(t, err)

ph, err := HashPasswordV4("test123", sec)
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertTrue(t, ph.HasTOTP())
tst.AssertTrue(t, ph.NeedsPasswordUpgrade())

tst.AssertFalse(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
tst.AssertFalse(t, ph.Verify("test124", nil))

ph, err = ph.Upgrade("test123")
tst.AssertNoErr(t, err)

tst.AssertTrue(t, ph.Valid())
tst.AssertTrue(t, ph.HasTOTP())
tst.AssertFalse(t, ph.NeedsPasswordUpgrade())

tst.AssertFalse(t, ph.Verify("test123", nil))
tst.AssertFalse(t, ph.Verify("test124", nil))
tst.AssertTrue(t, ph.Verify("test123", langext.Ptr(totpext.TOTP(sec))))
tst.AssertFalse(t, ph.Verify("test124", nil))
}
@@ -6,5 +6,5 @@ import (

type Filter interface {
FilterQuery() mongo.Pipeline
Pagination() (string, SortDirection, *string, *SortDirection)
Pagination() (string, SortDirection, string, SortDirection)
}
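Pagination now returns the secondary sort field and direction by value instead of by pointer. A hypothetical implementation of the updated interface (my own sketch; field names and directions are placeholders, only Filter itself comes from the diff):

package myapp

import (
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)

// exampleFilter satisfies the new cursortoken.Filter interface; the concrete
// SortDirection values come from the cursortoken package and are passed through.
type exampleFilter struct {
	owner   string
	primDir ct.SortDirection
	secDir  ct.SortDirection
}

func (f exampleFilter) FilterQuery() mongo.Pipeline {
	return mongo.Pipeline{
		{{Key: "$match", Value: bson.M{"owner": f.owner}}},
	}
}

// secondary sort field/direction are now plain values, not *string/*SortDirection
func (f exampleFilter) Pagination() (string, ct.SortDirection, string, ct.SortDirection) {
	return "createdAt", f.primDir, "_id", f.secDir
}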
go.mod (24 changes)

@@ -4,21 +4,21 @@ go 1.19

require (
github.com/jmoiron/sqlx v1.3.5
go.mongodb.org/mongo-driver v1.11.1
golang.org/x/crypto v0.4.0
golang.org/x/sys v0.3.0
golang.org/x/term v0.3.0
go.mongodb.org/mongo-driver v1.12.0
golang.org/x/crypto v0.11.0
golang.org/x/sys v0.10.0
golang.org/x/term v0.10.0
)

require (
github.com/golang/snappy v0.0.1 // indirect
github.com/klauspost/compress v1.13.6 // indirect
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/klauspost/compress v1.16.7 // indirect
github.com/montanaflynn/stats v0.7.1 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
github.com/xdg-go/scram v1.1.1 // indirect
github.com/xdg-go/stringprep v1.0.3 // indirect
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect
golang.org/x/text v0.5.0 // indirect
github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect
golang.org/x/sync v0.3.0 // indirect
golang.org/x/text v0.11.0 // indirect
)
go.sum (71 changes)

@@ -5,12 +5,20 @@ github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfC
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-cmp v0.5.2 h1:X2ev0eStA3AbceY54o37/0PQ/UWqKEiiO2dKL5OPaFM=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
github.com/klauspost/compress v1.13.6 h1:P76CopJELS0TiO2mebmnzgWaajssP/EszplttgQxcgc=
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.16.5 h1:IFV2oUNUzZaz+XyusxpLzpzS8Pt5rh0Z16For/djlyI=
github.com/klauspost/compress v1.16.5/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/klauspost/compress v1.16.6 h1:91SKEy4K37vkp255cJ8QesJhjyRO0hn9i9G0GoUwLsk=
github.com/klauspost/compress v1.16.6/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I=
github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=

@@ -22,6 +30,8 @@ github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRU
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe h1:iruDEfMl2E6fbMZ9s0scYfZQ84/6SPL6zC8ACM2oIL0=
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=

@@ -35,31 +45,92 @@ github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
github.com/xdg-go/scram v1.1.1 h1:VOMT+81stJgXW3CpHyqHN3AXDYIMsx56mEFrB37Mb/E=
github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY=
github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4=
github.com/xdg-go/stringprep v1.0.3 h1:kdwGpVNwPFtjs98xCGkHjQtGKh86rDcRZN17QEMCOIs=
github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=
github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA=
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk=
github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.mongodb.org/mongo-driver v1.11.1 h1:QP0znIRTuL0jf1oBQoAoM0C6ZJfBK4kx0Uumtv1A7w8=
go.mongodb.org/mongo-driver v1.11.1/go.mod h1:s7p5vEtfbeR1gYi6pnj3c3/urpbLv2T5Sfd6Rp2HBB8=
go.mongodb.org/mongo-driver v1.11.7 h1:LIwYxASDLGUg/8wOhgOOZhX8tQa/9tgZPgzZoVqJvcs=
go.mongodb.org/mongo-driver v1.11.7/go.mod h1:G9TgswdsWjX4tmDA5zfs2+6AEPpYJwqblyjsfuh8oXY=
go.mongodb.org/mongo-driver v1.12.0 h1:aPx33jmn/rQuJXPQLZQ8NtfPQG8CaqgLThFtqRb0PiE=
go.mongodb.org/mongo-driver v1.12.0/go.mod h1:AZkxhPnFJUoH7kZlFkVKucV20K387miPfm7oimrSmK0=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.4.0 h1:UVQgzMY87xqpKNgb+kDsll2Igd33HszWHFLmpaRMq/8=
golang.org/x/crypto v0.4.0/go.mod h1:3quD/ATkf6oY+rnes5c3ExXTbLc8mueNue5/DoinL80=
golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g=
golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0=
golang.org/x/crypto v0.10.0 h1:LKqV2xt9+kDzSTfOhx4FrkEBcMrAgHSYgzywV9zcGmM=
golang.org/x/crypto v0.10.0/go.mod h1:o4eNf7Ede1fv+hwOwZsTHl9EsPFO6q6ZvYR8vYfY45I=
golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA=
golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.2.0 h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI=
golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ=
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s=
golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA=
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.3.0 h1:qoo4akIqOcDME5bhc/NgxUdovd6BSS2uMsVjB56q1xI=
golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
golang.org/x/term v0.8.0 h1:n5xxQn2i3PC0yLAbjTpNT85q/Kgzcr2gIoX9OrJUols=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.9.0 h1:GRRCnKYhdQrD8kfRAdQ6Zcw1P0OcELxGLKJvtjVMZ28=
golang.org/x/term v0.9.0/go.mod h1:M6DEAAIenWoTxdKrOltXcmDY3rSplQUkrvaDU5FcQyo=
golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c=
golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM=
golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.10.0 h1:UpjohKhiEgNc0CSauXmwYftY1+LlaC75SJwh0SgCX58=
golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4=
golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
goextVersion.go (new file, 5 lines)

@@ -0,0 +1,5 @@
package goext

const GoextVersion = "0.0.171"

const GoextVersionTimestamp = "2023-07-18T13:34:54+0200"
@@ -16,6 +16,9 @@ func CreateGoExtBsonRegistry() *bsoncodec.Registry {
rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{})
rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.RFC3339NanoTime{}), rfctime.RFC3339NanoTime{})

rb.RegisterTypeDecoder(reflect.TypeOf(rfctime.Date{}), rfctime.Date{})
rb.RegisterTypeDecoder(reflect.TypeOf(&rfctime.Date{}), rfctime.Date{})

bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb)
bsoncodec.DefaultValueDecoders{}.RegisterDefaultDecoders(rb)
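A hedged wiring sketch (not from the diff; URI and setup are placeholders): the registry returned by CreateGoExtBsonRegistry is installed on the mongo client via the driver's SetRegistry option, so that the newly registered rfctime.Date decoders are used, including for null values on pointer fields.

package main

import (
	"context"

	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
	"gogs.mikescher.com/BlackForestBytes/goext/mongoext"
)

func main() {
	opts := options.Client().
		ApplyURI("mongodb://localhost:27017").
		SetRegistry(mongoext.CreateGoExtBsonRegistry())

	client, err := mongo.Connect(context.Background(), opts)
	if err != nil {
		panic(err)
	}
	defer func() { _ = client.Disconnect(context.Background()) }()
}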
@@ -1,4 +1,4 @@
package langext
package reflectext

import (
"reflect"
reflectext/primStrSer.go (new file, 136 lines)

@@ -0,0 +1,136 @@
package reflectext

import (
"errors"
"fmt"
"go.mongodb.org/mongo-driver/bson/primitive"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
"strconv"
"strings"
"time"
)

var primitiveSerializer = map[reflect.Type]genSerializer{

reflect.TypeOf(""): newGenSerializer(serStringToString, serStringToString),

reflect.TypeOf(int(0)): newGenSerializer(serIntNumToString[int], serStringToSIntNum[int]),
reflect.TypeOf(int32(0)): newGenSerializer(serIntNumToString[int32], serStringToSIntNum[int32]),
reflect.TypeOf(int64(0)): newGenSerializer(serIntNumToString[int64], serStringToSIntNum[int64]),

reflect.TypeOf(uint(0)): newGenSerializer(serIntNumToString[uint], serStringToUIntNum[uint]),
reflect.TypeOf(uint32(0)): newGenSerializer(serIntNumToString[uint32], serStringToUIntNum[uint32]),
reflect.TypeOf(uint64(0)): newGenSerializer(serIntNumToString[uint64], serStringToUIntNum[uint64]),

reflect.TypeOf(float32(0)): newGenSerializer(serFloatNumToString[float32], serStringToFloatNum[float32]),
reflect.TypeOf(float64(0)): newGenSerializer(serFloatNumToString[float64], serStringToFloatNum[float64]),

reflect.TypeOf(true): newGenSerializer(serBoolToString, serStringToBool),

reflect.TypeOf(primitive.ObjectID{}): newGenSerializer(serObjectIDToString, serStringToObjectID),

reflect.TypeOf(time.Time{}): newGenSerializer(serTimeToString, serStringToTime),
}

type genSerializer struct {
ToString func(v any) (string, error)
FromString func(v string) (any, error)
}

func newGenSerializer[TData any](tostr func(v TData) (string, error), fromstr func(v string) (TData, error)) genSerializer {
return genSerializer{
ToString: func(v any) (string, error) {
if tdv, ok := v.(TData); ok {
rv, err := tostr(tdv)
if err != nil {
return "", err
}
return rv, nil
} else {
return "", errors.New(fmt.Sprintf("cannot convert type %T to TData (%T)", v, *new(TData)))
}
},
FromString: func(v string) (any, error) {
nv, err := fromstr(v)
if err != nil {
return "", err
}
return nv, nil
},
}
}

func serStringToString(v string) (string, error) {
return v, nil
}

func serIntNumToString[TNum langext.IntegerConstraint](v TNum) (string, error) {
return strconv.FormatInt(int64(v), 10), nil
}

func serStringToSIntNum[TNum langext.SignedConstraint](v string) (TNum, error) {
r, err := strconv.ParseInt(v, 10, 64)
if err != nil {
return 0, err
}
return TNum(r), nil
}

func serStringToUIntNum[TNum langext.UnsignedConstraint](v string) (TNum, error) {
r, err := strconv.ParseUint(v, 10, 64)
if err != nil {
return 0, err
}
return TNum(r), nil
}

func serFloatNumToString[TNum langext.FloatConstraint](v TNum) (string, error) {
return strconv.FormatFloat(float64(v), 'f', -1, 64), nil
}

func serStringToFloatNum[TNum langext.FloatConstraint](v string) (TNum, error) {
r, err := strconv.ParseFloat(v, 64)
if err != nil {
return 0, err
}
return TNum(r), nil
}

func serBoolToString(v bool) (string, error) {
return langext.Conditional(v, "true", "false"), nil
}

func serStringToBool(v string) (bool, error) {
if strings.ToLower(v) == "true" {
return true, nil
}
if strings.ToLower(v) == "false" {
return true, nil
}
return false, errors.New(fmt.Sprintf("invalid boolean value '%s'", v))
}

func serObjectIDToString(v primitive.ObjectID) (string, error) {
return v.Hex(), nil
}

func serStringToObjectID(v string) (primitive.ObjectID, error) {
if rv, err := primitive.ObjectIDFromHex(v); err == nil {
return rv, nil
} else {
return primitive.ObjectID{}, err
}
}

func serTimeToString(v time.Time) (string, error) {
return v.Format(time.RFC3339Nano), nil
}

func serStringToTime(v string) (time.Time, error) {
if rv, err := time.Parse(time.RFC3339Nano, v); err == nil {
return rv, nil
} else {
return time.Time{}, err
}
}
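The map above pairs each supported reflect.Type with typed (de)serializers that newGenSerializer adapts to the any-based genSerializer interface. Purely as an illustration of that pattern (the real helpers are unexported, so this is a self-contained re-creation with a hypothetical time.Duration entry, not code from the repository):

package main

import (
	"fmt"
	"time"
)

// genSerializer/newGenSerializer re-created in miniature: a typed pair of
// functions is wrapped behind an any-based interface so all entries can
// share a single map type.
type genSerializer struct {
	ToString   func(v any) (string, error)
	FromString func(v string) (any, error)
}

func newGenSerializer[T any](tostr func(T) (string, error), fromstr func(string) (T, error)) genSerializer {
	return genSerializer{
		ToString: func(v any) (string, error) {
			tv, ok := v.(T)
			if !ok {
				return "", fmt.Errorf("cannot convert type %T", v)
			}
			return tostr(tv)
		},
		FromString: func(v string) (any, error) { return fromstr(v) },
	}
}

func main() {
	durSer := newGenSerializer(
		func(v time.Duration) (string, error) { return v.String(), nil },
		func(v string) (time.Duration, error) { return time.ParseDuration(v) },
	)
	s, _ := durSer.ToString(90 * time.Second)
	fmt.Println(s) // 1m30s
	d, _ := durSer.FromString("250ms")
	fmt.Println(d) // 250ms
}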
reflectext/primitiveStringSerializer.go (new file, 92 lines)

@@ -0,0 +1,92 @@
package reflectext

import (
"errors"
"fmt"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"reflect"
)

// PrimitiveStringSerializer is used to serialize primitive types (and a few more) from and to string
// This is not really intended to be user facing, and more as a simple building block for other mechanisms
// supports:
// - golang primitives (ints, uints, floats, bool, string)
// - type aliases
// - time.Time
// - primitive.ObjectID
type PrimitiveStringSerializer struct{}

func (pss PrimitiveStringSerializer) ValueToString(v any) (string, error) {

inType := reflect.TypeOf(v)

if inType.Kind() == reflect.Ptr && langext.IsNil(v) {
return "", nil
}

if inType.Kind() == reflect.Ptr {
rval1 := reflect.ValueOf(v)
rval2 := rval1.Elem()
rval3 := rval2.Interface()
return pss.ValueToString(rval3)
}

if conv, ok := primitiveSerializer[inType]; ok {
return conv.ToString(v)
}

for convType, conv := range primitiveSerializer {
if castV, ok := TryCastType(v, convType); ok {
return conv.ToString(castV)
}
}

return "", errors.New(fmt.Sprintf("failed to find a matching generic <toString> conversion fo type %T", v))
}

func (pss PrimitiveStringSerializer) ValueFromString(str string, outType reflect.Type) (any, error) {

if outType.Kind() == reflect.Ptr && str == "" {
return reflect.Zero(outType).Interface(), nil // = nil.(outType), nil
}

if str == "" {
return reflect.Zero(outType).Interface(), nil // = <default>(outType), nil
}

if outType.Kind() == reflect.Ptr {

innerValue, err := pss.ValueFromString(str, outType.Elem())
if err != nil {
return nil, err
}

// this weird piece of shit converts innerValue to &innerValue (while keeping types)

rval1 := reflect.ValueOf(innerValue)
rval2 := rval1.Convert(outType.Elem())
rval3 := reflect.New(outType.Elem())
rval3.Elem().Set(rval2)
rval4 := rval3.Interface()

return rval4, nil
}

if conv, ok := primitiveSerializer[outType]; ok {
return conv.FromString(str)
}

emptyResultVal := reflect.Zero(outType).Interface()

for convType, conv := range primitiveSerializer {
if _, ok := TryCastType(emptyResultVal, convType); ok {
if convVal, err := conv.FromString(str); err == nil {
if resVal, ok := TryCastType(convVal, outType); ok {
return resVal, nil
}
}
}
}

return "", errors.New(fmt.Sprintf("failed to find a matching generic <toString> conversion fo type %s", outType.String()))
}
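A short usage sketch of the exported type (my own example, the values are arbitrary): ValueToString takes any supported primitive (or pointer to one), ValueFromString goes the other way given the target reflect.Type, with "" mapping to nil for pointer targets.

package main

import (
	"fmt"
	"reflect"

	"gogs.mikescher.com/BlackForestBytes/goext/reflectext"
)

func main() {
	pss := reflectext.PrimitiveStringSerializer{}

	s, err := pss.ValueToString(42)
	if err != nil {
		panic(err)
	}
	fmt.Println(s) // "42"

	v, err := pss.ValueFromString("3.5", reflect.TypeOf(float64(0)))
	if err != nil {
		panic(err)
	}
	fmt.Println(v.(float64) + 1) // 4.5

	// pointer targets: "" decodes to nil, everything else to &value
	p, err := pss.ValueFromString("7", reflect.TypeOf((*int)(nil)))
	if err != nil {
		panic(err)
	}
	fmt.Println(*p.(*int)) // 7
}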
rfctime/date.go (new file, 240 lines)

@@ -0,0 +1,240 @@
package rfctime

import (
"encoding/json"
"errors"
"fmt"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/bsoncodec"
"go.mongodb.org/mongo-driver/bson/bsonrw"
"go.mongodb.org/mongo-driver/bson/bsontype"
"reflect"
"time"
)

type Date struct {
Year int
Month int
Day int
}

func (t Date) Time(loc *time.Location) time.Time {
return time.Date(t.Year, time.Month(t.Month), t.Day, 0, 0, 0, 0, loc)
}

func (t Date) TimeUTC() time.Time {
return time.Date(t.Year, time.Month(t.Month), t.Day, 0, 0, 0, 0, time.UTC)
}

func (t Date) TimeLocal() time.Time {
return time.Date(t.Year, time.Month(t.Month), t.Day, 0, 0, 0, 0, time.Local)
}

func (t Date) MarshalBinary() ([]byte, error) {
return t.TimeUTC().MarshalBinary()
}

func (t *Date) UnmarshalBinary(data []byte) error {
nt := time.Time{}
if err := nt.UnmarshalBinary(data); err != nil {
return err
}
t.Year = nt.Year()
t.Month = int(nt.Month())
t.Day = nt.Day()
return nil
}

func (t Date) GobEncode() ([]byte, error) {
return t.TimeUTC().GobEncode()
}

func (t *Date) GobDecode(data []byte) error {
nt := time.Time{}
if err := nt.GobDecode(data); err != nil {
return err
}
t.Year = nt.Year()
t.Month = int(nt.Month())
t.Day = nt.Day()
return nil
}

func (t *Date) UnmarshalJSON(data []byte) error {
str := ""
if err := json.Unmarshal(data, &str); err != nil {
return err
}
t0, err := time.Parse(t.FormatStr(), str)
if err != nil {
return err
}
t.Year = t0.Year()
t.Month = int(t0.Month())
t.Day = t0.Day()
return nil
}

func (t Date) MarshalJSON() ([]byte, error) {
str := t.TimeUTC().Format(t.FormatStr())
return json.Marshal(str)
}

func (t Date) MarshalText() ([]byte, error) {
b := make([]byte, 0, len(t.FormatStr()))
return t.TimeUTC().AppendFormat(b, t.FormatStr()), nil
}

func (t *Date) UnmarshalText(data []byte) error {
var err error
v, err := time.Parse(t.FormatStr(), string(data))
if err != nil {
return err
}
t.Year = v.Year()
t.Month = int(v.Month())
t.Day = v.Day()
return nil
}

func (t *Date) UnmarshalBSONValue(bt bsontype.Type, data []byte) error {
if bt == bsontype.Null {
// we can't set nil in UnmarshalBSONValue (so we use default(struct))
// Use mongoext.CreateGoExtBsonRegistry if you need to unmarsh pointer values
// https://stackoverflow.com/questions/75167597
// https://jira.mongodb.org/browse/GODRIVER-2252
*t = Date{}
return nil
}
if bt != bsontype.String {
return errors.New(fmt.Sprintf("cannot unmarshal %v into Date", bt))
}

var tt string
err := bson.RawValue{Type: bt, Value: data}.Unmarshal(&tt)
if err != nil {
return err
}

v, err := time.Parse(t.FormatStr(), tt)
if err != nil {
return err
}
t.Year = v.Year()
t.Month = int(v.Month())
t.Day = v.Day()

return nil
}

func (t Date) MarshalBSONValue() (bsontype.Type, []byte, error) {
return bson.MarshalValue(t.TimeUTC().Format(t.FormatStr()))
}

func (t Date) DecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
if val.Kind() == reflect.Ptr && val.IsNil() {
if !val.CanSet() {
return errors.New("ValueUnmarshalerDecodeValue")
}
val.Set(reflect.New(val.Type().Elem()))
}

tp, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
if err != nil {
return err
}

if val.Kind() == reflect.Ptr && len(src) == 0 {
val.Set(reflect.Zero(val.Type()))
return nil
}

err = t.UnmarshalBSONValue(tp, src)
if err != nil {
return err
}

if val.Kind() == reflect.Ptr {
val.Set(reflect.ValueOf(&t))
} else {
val.Set(reflect.ValueOf(t))
}

return nil
}

func (t Date) Serialize() string {
return t.TimeUTC().Format(t.FormatStr())
}

func (t Date) FormatStr() string {
return "2006-01-02"
}

func (t Date) Date() (year int, month time.Month, day int) {
return t.TimeUTC().Date()
}

func (t Date) Weekday() time.Weekday {
return t.TimeUTC().Weekday()
}

func (t Date) ISOWeek() (year, week int) {
return t.TimeUTC().ISOWeek()
}

func (t Date) YearDay() int {
return t.TimeUTC().YearDay()
}

func (t Date) AddDate(years int, months int, days int) Date {
return NewDate(t.TimeUTC().AddDate(years, months, days))
}

func (t Date) Unix() int64 {
return t.TimeUTC().Unix()
}

func (t Date) UnixMilli() int64 {
return t.TimeUTC().UnixMilli()
}

func (t Date) UnixMicro() int64 {
return t.TimeUTC().UnixMicro()
}

func (t Date) UnixNano() int64 {
return t.TimeUTC().UnixNano()
}

func (t Date) Format(layout string) string {
return t.TimeUTC().Format(layout)
}

func (t Date) GoString() string {
return t.TimeUTC().GoString()
}

func (t Date) String() string {
return t.TimeUTC().String()
}

func NewDate(t time.Time) Date {
return Date{
Year: t.Year(),
Month: int(t.Month()),
Day: t.Day(),
}
}

func NowDate(loc *time.Location) Date {
return NewDate(time.Now().In(loc))
}

func NowDateLoc() Date {
return NewDate(time.Now().In(time.UTC))
}

func NowDateUTC() Date {
return NewDate(time.Now().In(time.Local))
}
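A small JSON round-trip sketch for the new Date type (my own example; for BSON null handling on pointer fields the file itself points to mongoext.CreateGoExtBsonRegistry): dates marshal to and from the "2006-01-02" layout.

package main

import (
	"encoding/json"
	"fmt"

	"gogs.mikescher.com/BlackForestBytes/goext/rfctime"
)

type Appointment struct {
	Day rfctime.Date `json:"day"`
}

func main() {
	a := Appointment{Day: rfctime.Date{Year: 2023, Month: 7, Day: 18}}

	out, err := json.Marshal(a)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"day":"2023-07-18"}

	var back Appointment
	if err := json.Unmarshal(out, &back); err != nil {
		panic(err)
	}
	fmt.Println(back.Day.Year, back.Day.Month, back.Day.Day) // 2023 7 18
}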
@@ -2,6 +2,7 @@ package tst

import (
"encoding/hex"
"runtime/debug"
"testing"
)

@@ -54,12 +55,18 @@ func AssertHexEqual(t *testing.T, expected string, actual []byte) {

func AssertTrue(t *testing.T, value bool) {
if !value {
t.Error("value should be true")
t.Error("value should be true\n" + string(debug.Stack()))
}
}

func AssertFalse(t *testing.T, value bool) {
if value {
t.Error("value should be false")
t.Error("value should be false\n" + string(debug.Stack()))
}
}

func AssertNoErr(t *testing.T, anerr error) {
if anerr != nil {
t.Error("Function returned an error: " + anerr.Error() + "\n" + string(debug.Stack()))
}
}
@@ -2,19 +2,35 @@ package wmo
|
||||
|
||||
import (
|
||||
"context"
|
||||
"go.mongodb.org/mongo-driver/bson"
|
||||
"go.mongodb.org/mongo-driver/bson/bsontype"
|
||||
"go.mongodb.org/mongo-driver/mongo"
|
||||
"go.mongodb.org/mongo-driver/mongo/options"
|
||||
ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
type EntityID = any
|
||||
type EntityID interface {
|
||||
MarshalBSONValue() (bsontype.Type, []byte, error)
|
||||
String() string
|
||||
}
|
||||
|
||||
type fullTypeRef[TData any] struct {
|
||||
type Decodable interface {
|
||||
Decode(v any) error
|
||||
}
|
||||
|
||||
type Cursorable interface {
|
||||
Decode(v any) error
|
||||
Err() error
|
||||
Close(ctx context.Context) error
|
||||
All(ctx context.Context, results any) error
|
||||
RemainingBatchLength() int
|
||||
Next(ctx context.Context) bool
|
||||
}
|
||||
|
||||
type fullTypeRef struct {
|
||||
IsPointer bool
|
||||
Kind reflect.Kind
|
||||
RealType reflect.Type
|
||||
Type reflect.Type
|
||||
UnderlyingType reflect.Type
|
||||
Name string
|
||||
@@ -22,8 +38,11 @@ type fullTypeRef[TData any] struct {
|
||||
}
|
||||
|
||||
type Coll[TData any] struct {
|
||||
coll *mongo.Collection
|
||||
dataTypeMap map[string]fullTypeRef[TData]
|
||||
coll *mongo.Collection // internal mongo collection, access via Collection()
|
||||
dataTypeMap map[string]fullTypeRef // list of TData fields (only if TData is not an interface)
|
||||
implDataTypeMap map[reflect.Type]map[string]fullTypeRef // dynamic list of fields of TData implementations (only if TData is an interface)
|
||||
customDecoder *func(ctx context.Context, dec Decodable) (TData, error) // custom decoding function (useful if TData is an interface)
|
||||
isInterfaceDataType bool // true if TData is an interface (not a struct)
|
||||
}
|
||||
|
||||
func (c *Coll[TData]) Collection() *mongo.Collection {
|
||||
@@ -34,6 +53,14 @@ func (c *Coll[TData]) Name() string {
|
||||
return c.coll.Name()
|
||||
}
|
||||
|
||||
func (c *Coll[TData]) WithDecodeFunc(cdf func(ctx context.Context, dec Decodable) (TData, error), example TData) *Coll[TData] {
|
||||
|
||||
c.EnsureInitializedReflection(example)
|
||||
|
||||
c.customDecoder = langext.Ptr(cdf)
|
||||
return c
|
||||
}
|
||||
|
||||
func (c *Coll[TData]) Indexes() mongo.IndexView {
|
||||
return c.coll.Indexes()
|
||||
}
|
||||
@@ -42,189 +69,6 @@ func (c *Coll[TData]) Drop(ctx context.Context) error {
	return c.coll.Drop(ctx)
}

func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) {
	var res TData

	err := c.coll.FindOne(ctx, filter).Decode(&res)
	if err != nil {
		return *new(TData), err
	}

	return res, nil
}

func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) {
	var res TData

	err := c.coll.FindOne(ctx, filter).Decode(&res)
	if err == mongo.ErrNoDocuments {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}

	return &res, nil
}

func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) {
	var res TData

	err := c.coll.FindOne(ctx, bson.M{"_id": id}).Decode(&res)
	if err != nil {
		return *new(TData), err
	}

	return res, nil
}

func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) {
	var res TData

	err := c.coll.FindOne(ctx, bson.M{"_id": id}).Decode(&res)
	if err == mongo.ErrNoDocuments {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}

	return &res, nil
}

func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) {
	cursor, err := c.coll.Find(ctx, filter, opts...)
	if err != nil {
		return nil, err
	}

	res := make([]TData, 0, cursor.RemainingBatchLength())
	err = cursor.All(ctx, &res)
	if err != nil {
		return nil, err
	}

	return res, nil
}

func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) {
	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
	if err != nil {
		return nil, err
	}

	res := make([]TData, 0, cursor.RemainingBatchLength())
	err = cursor.All(ctx, &res)
	if err != nil {
		return nil, err
	}

	return res, nil
}

func (c *Coll[TData]) ReplaceOne(ctx context.Context, id EntityID, value TData) error {
	_, err := c.coll.UpdateOne(ctx, bson.M{"_id": id}, value)
	if err != nil {
		return err
	}

	return nil
}

func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQuery bson.M) error {
	_, err := c.coll.UpdateOne(ctx, filterQuery, updateQuery)
	if err != nil {
		return err
	}

	return nil
}

func (c *Coll[TData]) UpdateOneByID(ctx context.Context, id EntityID, updateQuery bson.M) error {
	_, err := c.coll.UpdateOne(ctx, bson.M{"_id": id}, updateQuery)
	if err != nil {
		return err
	}

	return nil
}

func (c *Coll[TData]) UpdateOneAndReturn(ctx context.Context, id EntityID, updateQuery bson.M) (TData, error) {
	_, err := c.coll.UpdateOne(ctx, bson.M{"_id": id}, updateQuery)
	if err != nil {
		return *new(TData), err
	}

	var res TData

	err = c.coll.FindOne(ctx, bson.M{"_id": id}).Decode(&res)
	if err != nil {
		return *new(TData), err
	}

	return res, nil
}

func (c *Coll[TData]) DeleteOne(ctx context.Context, id EntityID) error {
	_, err := c.coll.DeleteOne(ctx, bson.M{"_id": id})
	if err != nil {
		return err
	}

	return nil
}

func (c *Coll[TData]) DeleteMany(ctx context.Context, filterQuery bson.M) (*mongo.DeleteResult, error) {
	res, err := c.coll.DeleteMany(ctx, filterQuery)
	if err != nil {
		return nil, err
	}

	return res, nil
}

func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) {
	if inTok.Mode == ct.CTMEnd {
		return make([]TData, 0), ct.End(), nil
	}

	pipeline := filter.FilterQuery()

	sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary := filter.Pagination()

	paginationPipeline, err := CreatePagination(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
	if err != nil {
		return nil, ct.CursorToken{}, err
	}

	pipeline = append(pipeline, paginationPipeline...)

	cursor, err := c.coll.Aggregate(ctx, pipeline)
	if err != nil {
		return nil, ct.CursorToken{}, err
	}

	entities := make([]TData, 0, cursor.RemainingBatchLength()+1)
	for (pageSize == nil || len(entities) != *pageSize) && cursor.Next(ctx) {
		var entry TData
		err = cursor.Decode(&entry)
		if err != nil {
			return nil, ct.CursorToken{}, err
		}
		entities = append(entities, entry)
	}

	if pageSize == nil || len(entities) <= *pageSize || !cursor.TryNext(ctx) {
		return entities, ct.End(), nil
	}

	last := entities[len(entities)-1]

	nextToken, _ := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize)

	return entities, nextToken, nil
}

func (c *Coll[TData]) createToken(fieldPrimary string, dirPrimary ct.SortDirection, fieldSecondary *string, dirSecondary *ct.SortDirection, lastEntity TData, pageSize *int) (ct.CursorToken, error) {

	valuePrimary, err := c.getFieldValueAsTokenString(lastEntity, fieldPrimary)
54  wmo/decoding.go  Normal file
@@ -0,0 +1,54 @@
package wmo

import (
	"context"
)

func (c *Coll[TData]) decodeSingle(ctx context.Context, dec Decodable) (TData, error) {
	if c.customDecoder != nil {

		return (*c.customDecoder)(ctx, dec)

	} else {

		var res TData

		err := dec.Decode(&res)
		if err != nil {
			return *new(TData), err
		}

		return res, nil

	}
}

func (c *Coll[TData]) decodeAll(ctx context.Context, cursor Cursorable) ([]TData, error) {
	if c.customDecoder != nil {

		res := make([]TData, 0, cursor.RemainingBatchLength())

		for cursor.Next(ctx) {
			entry, err := (*c.customDecoder)(ctx, cursor)
			if err != nil {
				return nil, err
			}
			res = append(res, entry)
		}

		return res, nil

	} else {

		res := make([]TData, 0, cursor.RemainingBatchLength())

		err := cursor.All(ctx, &res)
		if err != nil {
			return nil, err
		}

		return res, nil

	}
}
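decodeSingle and decodeAll depend only on the two small interfaces declared in collection.go, so (as far as the current mongo-driver API goes) a FindOne result and a plain cursor can both be handed to them. A purely illustrative compile-time check of that assumption, not part of the change:

package wmo

import "go.mongodb.org/mongo-driver/mongo"

// illustrative compile-time assertions (assumed to hold for the driver version in go.mod):
var (
	_ Decodable  = (*mongo.SingleResult)(nil) // FindOne results go through decodeSingle
	_ Decodable  = (*mongo.Cursor)(nil)       // a cursor's current document decodes the same way
	_ Cursorable = (*mongo.Cursor)(nil)       // decodeAll iterates a plain driver cursor
)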
@@ -1,6 +1,7 @@
package wmo

import (
	"errors"
	"go.mongodb.org/mongo-driver/bson"
	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)
@@ -57,7 +58,24 @@ func CreatePagination[TData any](coll *Coll[TData], token ct.CursorToken, fieldP

	pipeline := make([]bson.D, 0, 3)

	pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}})
	if token.Mode == ct.CTMStart {

		// no gt/lt condition

	} else if token.Mode == ct.CTMNormal {

		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$or": cond}}})

	} else if token.Mode == ct.CTMEnd {

		// false
		pipeline = append(pipeline, bson.D{{Key: "$match", Value: bson.M{"$expr": bson.M{"$eq": bson.A{"1", "0"}}}}})

	} else {

		return nil, errors.New("unknown ct mode: " + string(token.Mode))

	}

	pipeline = append(pipeline, bson.D{{Key: "$sort", Value: sort}})
56  wmo/queryAggregate.go  Normal file
@@ -0,0 +1,56 @@
package wmo

import (
	"context"
	"errors"
	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
)

func (c *Coll[TData]) Aggregate(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) ([]TData, error) {
	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
	if err != nil {
		return nil, err
	}

	res, err := c.decodeAll(ctx, cursor)
	if err != nil {
		return nil, err
	}

	return res, nil
}

func (c *Coll[TData]) AggregateOneOpt(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (*TData, error) {
	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
	if err != nil {
		return nil, err
	}

	if cursor.Next(ctx) {
		v, err := c.decodeSingle(ctx, cursor)
		if err != nil {
			return nil, err
		}
		return &v, nil
	}

	return nil, nil
}

func (c *Coll[TData]) AggregateOne(ctx context.Context, pipeline mongo.Pipeline, opts ...*options.AggregateOptions) (TData, error) {
	cursor, err := c.coll.Aggregate(ctx, pipeline, opts...)
	if err != nil {
		return *new(TData), err
	}

	if cursor.Next(ctx) {
		v, err := c.decodeSingle(ctx, cursor)
		if err != nil {
			return *new(TData), err
		}
		return v, nil
	}

	return *new(TData), errors.New("no document in result")
}
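The three helpers differ in how an empty result is reported: Aggregate returns all matches, AggregateOneOpt returns a nil pointer, and AggregateOne returns an error. A short sketch; the Car collection reuses the illustrative types from the sketch further up, the pipeline and field names are invented, and the usual bson/mongo/wmo imports are omitted:

func aggregateExamples(ctx context.Context, cars *wmo.Coll[Car]) error {
	pipeline := mongo.Pipeline{
		bson.D{{Key: "$match", Value: bson.M{"wheels": 4}}},
		bson.D{{Key: "$sort", Value: bson.M{"_id": -1}}},
	}

	all, err := cars.Aggregate(ctx, pipeline) // every matching document, decoded via decodeAll
	if err != nil {
		return err
	}

	newest, err := cars.AggregateOneOpt(ctx, pipeline) // *Car; nil, nil when the pipeline yields nothing
	if err != nil {
		return err
	}

	_, _ = all, newest
	return nil
}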
34  wmo/queryDelete.go  Normal file
@@ -0,0 +1,34 @@
package wmo

import (
	"context"
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
)

func (c *Coll[TData]) DeleteOneByID(ctx context.Context, id EntityID) error {
	_, err := c.coll.DeleteOne(ctx, bson.M{"_id": id})
	if err != nil {
		return err
	}

	return nil
}

func (c *Coll[TData]) DeleteOne(ctx context.Context, filterQuery bson.M) error {
	_, err := c.coll.DeleteOne(ctx, filterQuery)
	if err != nil {
		return err
	}

	return nil
}

func (c *Coll[TData]) DeleteMany(ctx context.Context, filterQuery bson.M) (*mongo.DeleteResult, error) {
	res, err := c.coll.DeleteMany(ctx, filterQuery)
	if err != nil {
		return nil, err
	}

	return res, nil
}
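DeleteOne now takes an arbitrary filter while DeleteOneByID keeps the old by-ID behaviour, and DeleteMany passes the driver's result through so the caller can inspect the deleted count. A tiny usage sketch, with an invented filter field and the Car type from the earlier sketch (imports omitted):

func purgeThreeWheelers(ctx context.Context, cars *wmo.Coll[Car]) (int64, error) {
	res, err := cars.DeleteMany(ctx, bson.M{"wheels": 3})
	if err != nil {
		return 0, err
	}
	return res.DeletedCount, nil
}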
62  wmo/queryFind.go  Normal file
@@ -0,0 +1,62 @@
package wmo

import (
	"context"
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
)

func (c *Coll[TData]) FindOne(ctx context.Context, filter bson.M) (TData, error) {
	mongoRes := c.coll.FindOne(ctx, filter)

	return c.decodeSingle(ctx, mongoRes)
}

func (c *Coll[TData]) FindOneOpt(ctx context.Context, filter bson.M) (*TData, error) {
	mongoRes := c.coll.FindOne(ctx, filter)

	res, err := c.decodeSingle(ctx, mongoRes)
	if err == mongo.ErrNoDocuments {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}

	return &res, nil
}

func (c *Coll[TData]) FindOneByID(ctx context.Context, id EntityID) (TData, error) {
	mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id})

	return c.decodeSingle(ctx, mongoRes)
}

func (c *Coll[TData]) FindOneOptByID(ctx context.Context, id EntityID) (*TData, error) {
	mongoRes := c.coll.FindOne(ctx, bson.M{"_id": id})

	res, err := c.decodeSingle(ctx, mongoRes)
	if err == mongo.ErrNoDocuments {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}

	return &res, nil
}

func (c *Coll[TData]) Find(ctx context.Context, filter bson.M, opts ...*options.FindOptions) ([]TData, error) {
	cursor, err := c.coll.Find(ctx, filter, opts...)
	if err != nil {
		return nil, err
	}

	res, err := c.decodeAll(ctx, cursor)
	if err != nil {
		return nil, err
	}

	return res, nil
}
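The find variants differ mainly in how a missing document is reported. A sketch, again with the illustrative Car collection and invented filter values (imports omitted):

func findExamples(ctx context.Context, cars *wmo.Coll[Car]) error {
	// FindOne reports "not found" as an error (mongo.ErrNoDocuments) ...
	car, err := cars.FindOne(ctx, bson.M{"wheels": 4})
	if err != nil {
		return err
	}

	// ... while FindOneOpt reports it as a nil pointer instead of an error
	maybeCar, err := cars.FindOneOpt(ctx, bson.M{"wheels": 6})
	if err != nil {
		return err
	}
	if maybeCar == nil {
		// nothing matched
	}

	// Find accepts the usual driver options, e.g. a limit
	few, err := cars.Find(ctx, bson.M{}, options.Find().SetLimit(10))
	if err != nil {
		return err
	}

	_, _ = car, few
	return nil
}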
17  wmo/queryInsert.go  Normal file
@@ -0,0 +1,17 @@
package wmo

import (
	"context"
	"go.mongodb.org/mongo-driver/bson"
)

func (c *Coll[TData]) InsertOne(ctx context.Context, valueIn TData) (TData, error) {
	insRes, err := c.coll.InsertOne(ctx, valueIn)
	if err != nil {
		return *new(TData), err
	}

	mongoRes := c.coll.FindOne(ctx, bson.M{"_id": insRes.InsertedID})

	return c.decodeSingle(ctx, mongoRes)
}
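Note that InsertOne issues a second round-trip: after the insert it re-reads the document via the driver-reported InsertedID and runs it through decodeSingle, so the caller gets back the value as it is actually stored. A minimal usage sketch with the illustrative Car type (imports omitted):

func createCar(ctx context.Context, cars *wmo.Coll[Car]) (Car, error) {
	// the returned Car is the freshly stored document, not the input value
	return cars.InsertOne(ctx, Car{ID: "car_001", Wheels: 4})
}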
107  wmo/queryList.go  Normal file
@@ -0,0 +1,107 @@
package wmo

import (
	"context"
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	ct "gogs.mikescher.com/BlackForestBytes/goext/cursortoken"
)

func (c *Coll[TData]) List(ctx context.Context, filter ct.Filter, pageSize *int, inTok ct.CursorToken) ([]TData, ct.CursorToken, error) {
	if inTok.Mode == ct.CTMEnd {
		return make([]TData, 0), ct.End(), nil
	}

	pipeline := mongo.Pipeline{}
	pf1 := "_id"
	pd1 := ct.SortASC
	pf2 := "_id"
	pd2 := ct.SortASC

	if filter != nil {
		pipeline = filter.FilterQuery()
		pf1, pd1, pf2, pd2 = filter.Pagination()
	}

	sortPrimary := pf1
	sortDirPrimary := pd1
	sortSecondary := &pf2
	sortDirSecondary := &pd2

	if pf1 == pf2 {
		sortSecondary = nil
		sortDirSecondary = nil
	}

	paginationPipeline, err := CreatePagination(c, inTok, sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, pageSize)
	if err != nil {
		return nil, ct.CursorToken{}, err
	}

	pipeline = append(pipeline, paginationPipeline...)

	cursor, err := c.coll.Aggregate(ctx, pipeline)
	if err != nil {
		return nil, ct.CursorToken{}, err
	}

	// fast branch
	if pageSize == nil {
		entries, err := c.decodeAll(ctx, cursor)
		if err != nil {
			return nil, ct.CursorToken{}, err
		}
		return entries, ct.End(), nil
	}

	entities := make([]TData, 0, cursor.RemainingBatchLength())
	for (pageSize == nil || len(entities) != *pageSize) && cursor.Next(ctx) {
		var entry TData
		entry, err = c.decodeSingle(ctx, cursor)
		if err != nil {
			return nil, ct.CursorToken{}, err
		}
		entities = append(entities, entry)
	}

	if pageSize == nil || len(entities) < *pageSize || !cursor.TryNext(ctx) {
		return entities, ct.End(), nil
	}

	last := entities[len(entities)-1]

	c.EnsureInitializedReflection(last)

	nextToken, err := c.createToken(sortPrimary, sortDirPrimary, sortSecondary, sortDirSecondary, last, pageSize)
	if err != nil {
		return nil, ct.CursorToken{}, err
	}

	return entities, nextToken, nil
}

type countRes struct {
	Count int64 `bson:"c"`
}

func (c *Coll[TData]) Count(ctx context.Context, filter ct.Filter) (int64, error) {
	pipeline := filter.FilterQuery()

	pipeline = append(pipeline, bson.D{{Key: "$count", Value: "c"}})

	cursor, err := c.coll.Aggregate(ctx, pipeline)
	if err != nil {
		return 0, err
	}

	if cursor.Next(ctx) {
		v := countRes{}
		err = cursor.Decode(&v)
		if err != nil {
			return 0, err
		}
		return v.Count, nil
	}

	return 0, nil
}
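List implements cursor-token paging: it runs the filter pipeline plus the stages from CreatePagination, decodes up to pageSize entries, and returns ct.End() once the cursor is exhausted, otherwise a token derived from the sort-field values of the last entity. A paging-loop sketch; ct.Start() as the initial token is an assumption here (mirroring ct.End()), as is passing nil for "no filter, sort by _id", and the usual imports (wmo, ct, langext) are omitted:

func listAllCars(ctx context.Context, cars *wmo.Coll[Car]) ([]Car, error) {
	out := make([]Car, 0)
	tok := ct.Start() // assumed initial-token constructor, analogous to ct.End()

	for {
		page, next, err := cars.List(ctx, nil, langext.Ptr(100), tok)
		if err != nil {
			return nil, err
		}
		out = append(out, page...)

		if next.Mode == ct.CTMEnd {
			return out, nil // no further pages
		}
		tok = next
	}
}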
56  wmo/queryUpdate.go  Normal file
@@ -0,0 +1,56 @@
package wmo

import (
	"context"
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
)

func (c *Coll[TData]) FindOneAndUpdate(ctx context.Context, filterQuery bson.M, updateQuery bson.M) (TData, error) {
	mongoRes := c.coll.FindOneAndUpdate(ctx, filterQuery, updateQuery, options.FindOneAndUpdate().SetReturnDocument(options.After))

	return c.decodeSingle(ctx, mongoRes)
}

func (c *Coll[TData]) UpdateOne(ctx context.Context, filterQuery bson.M, updateQuery bson.M) error {
	_, err := c.coll.UpdateOne(ctx, filterQuery, updateQuery)
	if err != nil {
		return err
	}

	return nil
}

func (c *Coll[TData]) UpdateOneByID(ctx context.Context, id EntityID, updateQuery bson.M) error {
	_, err := c.coll.UpdateOne(ctx, bson.M{"_id": id}, updateQuery)
	if err != nil {
		return err
	}

	return nil
}

func (c *Coll[TData]) UpdateMany(ctx context.Context, filterQuery bson.M, updateQuery bson.M) (*mongo.UpdateResult, error) {
	res, err := c.coll.UpdateMany(ctx, filterQuery, updateQuery)
	if err != nil {
		return nil, err
	}

	return res, nil
}

func (c *Coll[TData]) ReplaceOne(ctx context.Context, filterQuery bson.M, value TData) error {
	_, err := c.coll.UpdateOne(ctx, filterQuery, bson.M{"$set": value})
	if err != nil {
		return err
	}

	return nil
}

func (c *Coll[TData]) FindOneAndReplace(ctx context.Context, filterQuery bson.M, value TData) (TData, error) {
	mongoRes := c.coll.FindOneAndUpdate(ctx, filterQuery, bson.M{"$set": value}, options.FindOneAndUpdate().SetReturnDocument(options.After))

	return c.decodeSingle(ctx, mongoRes)
}
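FindOneAndUpdate and FindOneAndReplace use the driver's ReturnDocument(After) option, so they hand back the post-update state in one round-trip; ReplaceOne is implemented as an UpdateOne with a $set of the whole value rather than a driver-level replace. A small sketch with the illustrative Car type and invented field values (imports omitted):

func setWheelCount(ctx context.Context, cars *wmo.Coll[Car], id string, wheels int) (Car, error) {
	// returns the document as it looks after the update
	return cars.FindOneAndUpdate(ctx,
		bson.M{"_id": id},
		bson.M{"$set": bson.M{"wheels": wheels}},
	)
}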
@@ -2,26 +2,61 @@ package wmo
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/reflectext"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"time"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func (c *Coll[TData]) EnsureInitializedReflection(v TData) {
|
||||
|
||||
if !c.isInterfaceDataType {
|
||||
return // only dynamically load dataTypeMap on interface TData
|
||||
}
|
||||
|
||||
rval := reflect.ValueOf(v)
|
||||
for rval.Type().Kind() == reflect.Pointer {
|
||||
rval = rval.Elem()
|
||||
}
|
||||
|
||||
if _, ok := c.implDataTypeMap[rval.Type()]; ok {
|
||||
return // already loaded
|
||||
}
|
||||
|
||||
m := make(map[string]fullTypeRef)
|
||||
|
||||
c.initFields("", rval, m, make([]int, 0))
|
||||
|
||||
c.implDataTypeMap[rval.Type()] = m
|
||||
}
|
||||
|
||||
func (c *Coll[TData]) init() {
|
||||
|
||||
c.dataTypeMap = make(map[string]fullTypeRef[TData])
|
||||
|
||||
example := *new(TData)
|
||||
|
||||
v := reflect.ValueOf(example)
|
||||
datatype := reflect.TypeOf(&example).Elem()
|
||||
|
||||
c.initFields("", v, make([]int, 0))
|
||||
if datatype.Kind() == reflect.Interface {
|
||||
|
||||
c.isInterfaceDataType = true
|
||||
|
||||
c.dataTypeMap = make(map[string]fullTypeRef)
|
||||
c.implDataTypeMap = make(map[reflect.Type]map[string]fullTypeRef)
|
||||
} else {
|
||||
|
||||
c.isInterfaceDataType = false
|
||||
|
||||
c.dataTypeMap = make(map[string]fullTypeRef)
|
||||
c.implDataTypeMap = make(map[reflect.Type]map[string]fullTypeRef)
|
||||
|
||||
v := reflect.ValueOf(example)
|
||||
c.initFields("", v, c.dataTypeMap, make([]int, 0))
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func (c *Coll[TData]) initFields(prefix string, rval reflect.Value, idxarr []int) {
|
||||
func (c *Coll[TData]) initFields(prefix string, rval reflect.Value, m map[string]fullTypeRef, idxarr []int) {
|
||||
|
||||
rtyp := rval.Type()
|
||||
|
||||
@@ -34,42 +69,65 @@ func (c *Coll[TData]) initFields(prefix string, rval reflect.Value, idxarr []int
|
||||
continue
|
||||
}
|
||||
|
||||
bsontags := make([]string, 0)
|
||||
bsonkey, found := rsfield.Tag.Lookup("bson")
|
||||
if !found || bsonkey == "-" {
|
||||
if !found {
|
||||
continue
|
||||
}
|
||||
if strings.Contains(bsonkey, ",") {
|
||||
bsontags = strings.Split(bsonkey[strings.Index(bsonkey, ",")+1:], ",")
|
||||
bsonkey = bsonkey[:strings.Index(bsonkey, ",")]
|
||||
}
|
||||
if bsonkey == "-" {
|
||||
continue
|
||||
}
|
||||
|
||||
if bsonkey == "" {
|
||||
bsonkey = rsfield.Name
|
||||
}
|
||||
|
||||
fullKey := prefix + bsonkey
|
||||
|
||||
newIdxArr := langext.ArrCopy(idxarr)
|
||||
newIdxArr = append(newIdxArr, i)
|
||||
|
||||
if rvfield.Type().Kind() == reflect.Pointer {
|
||||
if langext.InArray("inline", bsontags) && rvfield.Kind() == reflect.Struct {
|
||||
|
||||
c.dataTypeMap[fullKey] = fullTypeRef[TData]{
|
||||
IsPointer: true,
|
||||
Kind: rvfield.Type().Elem().Kind(),
|
||||
Type: rvfield.Type().Elem(),
|
||||
UnderlyingType: langext.Underlying(rvfield.Type().Elem()),
|
||||
Name: rsfield.Name,
|
||||
Index: newIdxArr,
|
||||
}
|
||||
// pass-through field
|
||||
c.initFields(prefix, rvfield, m, newIdxArr)
|
||||
|
||||
} else {
|
||||
|
||||
c.dataTypeMap[fullKey] = fullTypeRef[TData]{
|
||||
IsPointer: false,
|
||||
Kind: rvfield.Type().Kind(),
|
||||
Type: rvfield.Type(),
|
||||
UnderlyingType: langext.Underlying(rvfield.Type()),
|
||||
Name: rsfield.Name,
|
||||
Index: newIdxArr,
|
||||
if rvfield.Type().Kind() == reflect.Pointer {
|
||||
|
||||
m[fullKey] = fullTypeRef{
|
||||
IsPointer: true,
|
||||
RealType: rvfield.Type(),
|
||||
Kind: rvfield.Type().Elem().Kind(),
|
||||
Type: rvfield.Type().Elem(),
|
||||
UnderlyingType: reflectext.Underlying(rvfield.Type().Elem()),
|
||||
Name: rsfield.Name,
|
||||
Index: newIdxArr,
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
m[fullKey] = fullTypeRef{
|
||||
IsPointer: false,
|
||||
RealType: rvfield.Type(),
|
||||
Kind: rvfield.Type().Kind(),
|
||||
Type: rvfield.Type(),
|
||||
UnderlyingType: reflectext.Underlying(rvfield.Type()),
|
||||
Name: rsfield.Name,
|
||||
Index: newIdxArr,
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
if rvfield.Kind() == reflect.Struct {
|
||||
c.initFields(fullKey+".", rvfield, m, newIdxArr)
|
||||
}
|
||||
|
||||
if rvfield.Kind() == reflect.Struct {
|
||||
c.initFields(fullKey+".", rvfield, newIdxArr)
|
||||
}
|
||||
|
||||
}
|
||||
@@ -77,218 +135,81 @@ func (c *Coll[TData]) initFields(prefix string, rval reflect.Value, idxarr []int
|
||||
}
|
||||
|
||||
func (c *Coll[TData]) getTokenValueAsMongoType(value string, fieldName string) (any, error) {
|
||||
fref := c.dataTypeMap[fieldName]
|
||||
|
||||
if fref.IsPointer && value == "" {
|
||||
pointertype := reflect.New(fref.Type).Type()
|
||||
nilvalue := reflect.Zero(pointertype)
|
||||
outvalue := nilvalue.Interface()
|
||||
return outvalue, nil
|
||||
fref, err := c.getFieldType(fieldName)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
pointerize := func(v any) any {
|
||||
if !fref.IsPointer {
|
||||
return v
|
||||
}
|
||||
pss := reflectext.PrimitiveStringSerializer{}
|
||||
|
||||
rval1 := reflect.ValueOf(v)
|
||||
rval2 := rval1.Convert(fref.Type)
|
||||
rval3 := reflect.New(fref.Type)
|
||||
rval3.Elem().Set(rval2)
|
||||
return rval3.Interface()
|
||||
}
|
||||
return pss.ValueFromString(value, fref.RealType)
|
||||
|
||||
if fref.UnderlyingType == reflect.TypeOf("") {
|
||||
|
||||
rt, ok := langext.TryCastType(value, fref.Type)
|
||||
if !ok {
|
||||
return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from string to %s", fieldName, fref.Type.String()))
|
||||
}
|
||||
|
||||
return pointerize(rt), nil
|
||||
}
|
||||
|
||||
if fref.UnderlyingType == reflect.TypeOf(time.Time{}) {
|
||||
|
||||
t, err := time.Parse(time.RFC3339Nano, value)
|
||||
if err != nil {
|
||||
return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as time.Time ('%s')", fieldName, value))
|
||||
}
|
||||
|
||||
rt, ok := langext.TryCastType(t, fref.Type)
|
||||
if !ok {
|
||||
return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from time.Time to %s", fieldName, fref.Type.String()))
|
||||
}
|
||||
|
||||
return pointerize(rt), nil
|
||||
}
|
||||
|
||||
if fref.UnderlyingType == reflect.TypeOf(int(0)) {
|
||||
|
||||
t, err := strconv.ParseInt(value, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as int64 ('%s')", fieldName, value))
|
||||
}
|
||||
|
||||
rt, ok := langext.TryCastType(int(t), fref.Type)
|
||||
if !ok {
|
||||
return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from int to %s", fieldName, fref.Type.String()))
|
||||
}
|
||||
|
||||
return pointerize(rt), nil
|
||||
}
|
||||
|
||||
if fref.UnderlyingType == reflect.TypeOf(int32(0)) {
|
||||
|
||||
t, err := strconv.ParseInt(value, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as int32 ('%s')", fieldName, value))
|
||||
}
|
||||
|
||||
rt, ok := langext.TryCastType(int32(t), fref.Type)
|
||||
if !ok {
|
||||
return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from int32 to %s", fieldName, fref.Type.String()))
|
||||
}
|
||||
|
||||
return pointerize(rt), nil
|
||||
}
|
||||
|
||||
if fref.UnderlyingType == reflect.TypeOf(int64(0)) {
|
||||
|
||||
t, err := strconv.ParseInt(value, 10, 64)
|
||||
if err != nil {
|
||||
return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as int64 ('%s')", fieldName, value))
|
||||
}
|
||||
|
||||
rt, ok := langext.TryCastType(int64(t), fref.Type)
|
||||
if !ok {
|
||||
return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from int64 to %s", fieldName, fref.Type.String()))
|
||||
}
|
||||
|
||||
return pointerize(rt), nil
|
||||
}
|
||||
|
||||
if fref.UnderlyingType == reflect.TypeOf(float32(0)) {
|
||||
|
||||
t, err := strconv.ParseFloat(value, 64)
|
||||
if err != nil {
|
||||
return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as float32 ('%s')", fieldName, value))
|
||||
}
|
||||
|
||||
rt, ok := langext.TryCastType(float32(t), fref.Type)
|
||||
if !ok {
|
||||
return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from float32 to %s", fieldName, fref.Type.String()))
|
||||
}
|
||||
|
||||
return pointerize(rt), nil
|
||||
}
|
||||
|
||||
if fref.UnderlyingType == reflect.TypeOf(float64(0)) {
|
||||
|
||||
t, err := strconv.ParseFloat(value, 64)
|
||||
if err != nil {
|
||||
return nil, errors.New(fmt.Sprintf("failed to parse field '%s' as float64 ('%s')", fieldName, value))
|
||||
}
|
||||
|
||||
rt, ok := langext.TryCastType(float64(t), fref.Type)
|
||||
if !ok {
|
||||
return nil, errors.New(fmt.Sprintf("failed to parse field '%s' from float64 to %s", fieldName, fref.Type.String()))
|
||||
}
|
||||
|
||||
return pointerize(rt), nil
|
||||
}
|
||||
|
||||
return nil, errors.New(fmt.Sprintf("failed to parse field '%s' of type %s (%s)", fieldName, fref.Type.String(), fref.UnderlyingType.String()))
|
||||
}
|
||||
|
||||
func (c *Coll[TData]) getFieldValueAsTokenString(entity TData, fieldName string) (string, error) {
|
||||
fref := c.dataTypeMap[fieldName]
|
||||
|
||||
realValue := c.getFieldValue(entity, fieldName)
|
||||
|
||||
if langext.IsNil(realValue) {
|
||||
return "", nil
|
||||
realValue, err := c.getFieldValue(entity, fieldName)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
reflValue := reflect.ValueOf(realValue)
|
||||
if reflValue.Kind() == reflect.Pointer {
|
||||
reflValue = reflValue.Elem()
|
||||
realValue = reflValue.Interface()
|
||||
}
|
||||
pss := reflectext.PrimitiveStringSerializer{}
|
||||
|
||||
if fref.UnderlyingType == reflect.TypeOf("") {
|
||||
return pss.ValueToString(realValue)
|
||||
|
||||
rt, ok := langext.TryCastType(realValue, reflect.TypeOf(""))
|
||||
if !ok {
|
||||
return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to string", fieldName, fref.Type.String()))
|
||||
}
|
||||
|
||||
return rt.(string), nil
|
||||
}
|
||||
|
||||
if fref.UnderlyingType == reflect.TypeOf(time.Time{}) {
|
||||
rt, ok := langext.TryCastType(realValue, reflect.TypeOf(time.Time{}))
|
||||
if !ok {
|
||||
return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to time.Time", fieldName, fref.Type.String()))
|
||||
}
|
||||
|
||||
return rt.(time.Time).Format(time.RFC3339Nano), nil
|
||||
}
|
||||
|
||||
if fref.UnderlyingType == reflect.TypeOf(int(0)) {
|
||||
rt, ok := langext.TryCastType(realValue, reflect.TypeOf(int(0)))
|
||||
if !ok {
|
||||
return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to int", fieldName, fref.Type.String()))
|
||||
}
|
||||
|
||||
return strconv.Itoa(rt.(int)), nil
|
||||
}
|
||||
|
||||
if fref.UnderlyingType == reflect.TypeOf(int32(0)) {
|
||||
rt, ok := langext.TryCastType(realValue, reflect.TypeOf(int32(0)))
|
||||
if !ok {
|
||||
return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to int32", fieldName, fref.Type.String()))
|
||||
}
|
||||
|
||||
return strconv.FormatInt(int64(rt.(int32)), 10), nil
|
||||
}
|
||||
|
||||
if fref.UnderlyingType == reflect.TypeOf(int64(0)) {
|
||||
rt, ok := langext.TryCastType(realValue, reflect.TypeOf(int64(0)))
|
||||
if !ok {
|
||||
return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to int64", fieldName, fref.Type.String()))
|
||||
}
|
||||
|
||||
return strconv.FormatInt(rt.(int64), 10), nil
|
||||
}
|
||||
|
||||
if fref.UnderlyingType == reflect.TypeOf(float32(0)) {
|
||||
rt, ok := langext.TryCastType(realValue, reflect.TypeOf(float32(0)))
|
||||
if !ok {
|
||||
return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to float32", fieldName, fref.Type.String()))
|
||||
}
|
||||
|
||||
return strconv.FormatFloat(float64(rt.(float32)), 'f', -1, 32), nil
|
||||
}
|
||||
|
||||
if fref.UnderlyingType == reflect.TypeOf(float64(0)) {
|
||||
rt, ok := langext.TryCastType(realValue, reflect.TypeOf(float64(0)))
|
||||
if !ok {
|
||||
return "", errors.New(fmt.Sprintf("failed to cast field '%s' from %s to float64", fieldName, fref.Type.String()))
|
||||
}
|
||||
|
||||
return strconv.FormatFloat(rt.(float64), 'f', -1, 64), nil
|
||||
}
|
||||
|
||||
return "", errors.New(fmt.Sprintf("failed to parse field '%s' of type %s (%s)", fieldName, fref.Type.String(), fref.UnderlyingType.String()))
|
||||
}
|
||||
|
||||
func (c *Coll[TData]) getFieldType(fieldName string) fullTypeRef[TData] {
|
||||
return c.dataTypeMap[fieldName]
|
||||
func (c *Coll[TData]) getFieldType(fieldName string) (fullTypeRef, error) {
|
||||
if c.isInterfaceDataType {
|
||||
|
||||
for _, m := range c.implDataTypeMap {
|
||||
if r, ok := m[fieldName]; ok {
|
||||
return r, nil
|
||||
}
|
||||
}
|
||||
|
||||
return fullTypeRef{}, errors.New("unknown field: '" + fieldName + "' (in any impl)")
|
||||
|
||||
} else {
|
||||
|
||||
if r, ok := c.dataTypeMap[fieldName]; ok {
|
||||
return r, nil
|
||||
} else {
|
||||
return fullTypeRef{}, errors.New("unknown field: '" + fieldName + "'")
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func (c *Coll[TData]) getFieldValue(data TData, fieldName string) any {
|
||||
fref := c.dataTypeMap[fieldName]
|
||||
rval := reflect.ValueOf(data)
|
||||
return rval.FieldByIndex(fref.Index).Interface()
|
||||
func (c *Coll[TData]) getFieldValue(data TData, fieldName string) (any, error) {
|
||||
if c.isInterfaceDataType {
|
||||
|
||||
rval := reflect.ValueOf(data)
|
||||
for rval.Type().Kind() == reflect.Pointer {
|
||||
rval = rval.Elem()
|
||||
}
|
||||
|
||||
if m, ok := c.implDataTypeMap[rval.Type()]; ok {
|
||||
if fref, ok := m[fieldName]; ok {
|
||||
rval := reflect.ValueOf(data)
|
||||
return rval.FieldByIndex(fref.Index).Interface(), nil
|
||||
} else {
|
||||
return nil, errors.New("unknown bson field '" + fieldName + "' in type '" + rval.Type().String() + "'")
|
||||
}
|
||||
} else {
|
||||
return nil, errors.New("unknown TData type: '" + rval.Type().String() + "'")
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
if fref, ok := c.dataTypeMap[fieldName]; ok {
|
||||
rval := reflect.ValueOf(data)
|
||||
return rval.FieldByIndex(fref.Index).Interface(), nil
|
||||
} else {
|
||||
return nil, errors.New("unknown bson field '" + fieldName + "'")
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
@@ -1,9 +1,14 @@
|
||||
package wmo
|
||||
|
||||
import (
|
||||
"context"
|
||||
"go.mongodb.org/mongo-driver/bson/primitive"
|
||||
"go.mongodb.org/mongo-driver/mongo"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/langext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/rfctime"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/timeext"
|
||||
"gogs.mikescher.com/BlackForestBytes/goext/tst"
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
@@ -18,8 +23,9 @@ func TestReflectionGetFieldType(t *testing.T) {
|
||||
Sub struct {
|
||||
A string `bson:"a"`
|
||||
} `bson:"sub"`
|
||||
Str string `bson:"str"`
|
||||
Ptr *int `bson:"ptr"`
|
||||
Str string `bson:"str"`
|
||||
Ptr *int `bson:"ptr"`
|
||||
MDate rfctime.RFC3339NanoTime `bson:"mdate"`
|
||||
}
|
||||
|
||||
coll := W[TestData](&mongo.Collection{})
|
||||
@@ -27,6 +33,7 @@ func TestReflectionGetFieldType(t *testing.T) {
|
||||
coll.init()
|
||||
|
||||
t0 := time.Now()
|
||||
t1 := rfctime.NewRFC3339Nano(t0)
|
||||
|
||||
d := TestData{
|
||||
ID: "1",
|
||||
@@ -36,39 +43,56 @@ func TestReflectionGetFieldType(t *testing.T) {
|
||||
}{
|
||||
A: "2",
|
||||
},
|
||||
Str: "3",
|
||||
Ptr: langext.Ptr(4),
|
||||
Str: "3",
|
||||
Ptr: langext.Ptr(4),
|
||||
MDate: t1,
|
||||
}
|
||||
|
||||
tst.AssertEqual(t, coll.getFieldType("_id").Kind.String(), "string")
|
||||
tst.AssertEqual(t, coll.getFieldType("_id").Type.String(), "wmo.IDType")
|
||||
tst.AssertEqual(t, coll.getFieldType("_id").Name, "ID")
|
||||
tst.AssertEqual(t, coll.getFieldType("_id").IsPointer, false)
|
||||
tst.AssertEqual(t, coll.getFieldValue(d, "_id").(IDType), "1")
|
||||
gft := func(k string) fullTypeRef {
|
||||
v, err := coll.getFieldType(k)
|
||||
if err != nil {
|
||||
t.Errorf("%s: %v", "failed to getFieldType", err)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
tst.AssertEqual(t, coll.getFieldType("cdate").Kind.String(), "struct")
|
||||
tst.AssertEqual(t, coll.getFieldType("cdate").Type.String(), "time.Time")
|
||||
tst.AssertEqual(t, coll.getFieldType("cdate").Name, "CDate")
|
||||
tst.AssertEqual(t, coll.getFieldType("cdate").IsPointer, false)
|
||||
tst.AssertEqual(t, coll.getFieldValue(d, "cdate").(time.Time), t0)
|
||||
gfv := func(k string) any {
|
||||
v, err := coll.getFieldValue(d, k)
|
||||
if err != nil {
|
||||
t.Errorf("%s: %v", "failed to getFieldType", err)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
tst.AssertEqual(t, coll.getFieldType("sub.a").Kind.String(), "string")
|
||||
tst.AssertEqual(t, coll.getFieldType("sub.a").Type.String(), "string")
|
||||
tst.AssertEqual(t, coll.getFieldType("sub.a").Name, "A")
|
||||
tst.AssertEqual(t, coll.getFieldType("sub.a").IsPointer, false)
|
||||
tst.AssertEqual(t, coll.getFieldValue(d, "sub.a").(string), "2")
|
||||
tst.AssertEqual(t, gft("_id").Kind.String(), "string")
|
||||
tst.AssertEqual(t, gft("_id").Type.String(), "wmo.IDType")
|
||||
tst.AssertEqual(t, gft("_id").Name, "ID")
|
||||
tst.AssertEqual(t, gft("_id").IsPointer, false)
|
||||
tst.AssertEqual(t, gfv("_id").(IDType), "1")
|
||||
|
||||
tst.AssertEqual(t, coll.getFieldType("str").Kind.String(), "string")
|
||||
tst.AssertEqual(t, coll.getFieldType("str").Type.String(), "string")
|
||||
tst.AssertEqual(t, coll.getFieldType("str").Name, "Str")
|
||||
tst.AssertEqual(t, coll.getFieldType("str").IsPointer, false)
|
||||
tst.AssertEqual(t, coll.getFieldValue(d, "str").(string), "3")
|
||||
tst.AssertEqual(t, gft("cdate").Kind.String(), "struct")
|
||||
tst.AssertEqual(t, gft("cdate").Type.String(), "time.Time")
|
||||
tst.AssertEqual(t, gft("cdate").Name, "CDate")
|
||||
tst.AssertEqual(t, gft("cdate").IsPointer, false)
|
||||
tst.AssertEqual(t, gfv("cdate").(time.Time), t0)
|
||||
|
||||
tst.AssertEqual(t, coll.getFieldType("ptr").Kind.String(), "int")
|
||||
tst.AssertEqual(t, coll.getFieldType("ptr").Type.String(), "int")
|
||||
tst.AssertEqual(t, coll.getFieldType("ptr").Name, "Ptr")
|
||||
tst.AssertEqual(t, coll.getFieldType("ptr").IsPointer, true)
|
||||
tst.AssertEqual(t, *coll.getFieldValue(d, "ptr").(*int), 4)
|
||||
tst.AssertEqual(t, gft("sub.a").Kind.String(), "string")
|
||||
tst.AssertEqual(t, gft("sub.a").Type.String(), "string")
|
||||
tst.AssertEqual(t, gft("sub.a").Name, "A")
|
||||
tst.AssertEqual(t, gft("sub.a").IsPointer, false)
|
||||
tst.AssertEqual(t, gfv("sub.a").(string), "2")
|
||||
|
||||
tst.AssertEqual(t, gft("str").Kind.String(), "string")
|
||||
tst.AssertEqual(t, gft("str").Type.String(), "string")
|
||||
tst.AssertEqual(t, gft("str").Name, "Str")
|
||||
tst.AssertEqual(t, gft("str").IsPointer, false)
|
||||
tst.AssertEqual(t, gfv("str").(string), "3")
|
||||
|
||||
tst.AssertEqual(t, gft("ptr").Kind.String(), "int")
|
||||
tst.AssertEqual(t, gft("ptr").Type.String(), "int")
|
||||
tst.AssertEqual(t, gft("ptr").Name, "Ptr")
|
||||
tst.AssertEqual(t, gft("ptr").IsPointer, true)
|
||||
tst.AssertEqual(t, *gfv("ptr").(*int), 4)
|
||||
}
|
||||
|
||||
func TestReflectionGetTokenValueAsMongoType(t *testing.T) {
|
||||
@@ -81,9 +105,10 @@ func TestReflectionGetTokenValueAsMongoType(t *testing.T) {
|
||||
Sub struct {
|
||||
A string `bson:"a"`
|
||||
} `bson:"sub"`
|
||||
Str string `bson:"str"`
|
||||
Ptr *int `bson:"ptr"`
|
||||
Num int `bson:"num"`
|
||||
Str string `bson:"str"`
|
||||
Ptr *int `bson:"ptr"`
|
||||
Num int `bson:"num"`
|
||||
MDate rfctime.RFC3339NanoTime `bson:"mdate"`
|
||||
}
|
||||
|
||||
coll := W[TestData](&mongo.Collection{})
|
||||
@@ -94,15 +119,23 @@ func TestReflectionGetTokenValueAsMongoType(t *testing.T) {
|
||||
v, err := coll.getTokenValueAsMongoType(value, fieldName)
|
||||
if err != nil {
|
||||
t.Errorf("%s", "failed to getTokenValueAsMongoType")
|
||||
t.Errorf("%v+", err)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
tx, err := time.Parse(time.RFC3339Nano, "2009-11-10T23:00:00Z")
|
||||
if err != nil {
|
||||
t.Errorf("%v", err)
|
||||
}
|
||||
|
||||
tst.AssertEqual(t, gtvasmt("hello", "str").(string), "hello")
|
||||
tst.AssertEqual(t, gtvasmt("4", "num").(int), 4)
|
||||
tst.AssertEqual(t, gtvasmt("asdf", "_id").(IDType), "asdf")
|
||||
tst.AssertEqual(t, gtvasmt("", "ptr").(*int), nil)
|
||||
tst.AssertEqual(t, *(gtvasmt("123", "ptr").(*int)), 123)
|
||||
tst.AssertEqual(t, gtvasmt("2009-11-10T23:00:00Z", "cdate").(time.Time), tx)
|
||||
tst.AssertEqual(t, gtvasmt("2009-11-10T23:00:00Z", "mdate").(rfctime.RFC3339NanoTime), rfctime.NewRFC3339Nano(tx))
|
||||
}
|
||||
|
||||
func TestReflectionGetFieldValueAsTokenString(t *testing.T) {
|
||||
@@ -115,22 +148,25 @@ func TestReflectionGetFieldValueAsTokenString(t *testing.T) {
|
||||
Sub struct {
|
||||
A string `bson:"a"`
|
||||
} `bson:"sub"`
|
||||
Str string `bson:"str"`
|
||||
Ptr *int `bson:"ptr"`
|
||||
Num int `bson:"num"`
|
||||
Ptr2 *int `bson:"ptr2"`
|
||||
FFF float64 `bson:"fff"`
|
||||
Str string `bson:"str"`
|
||||
Ptr *int `bson:"ptr"`
|
||||
Num int `bson:"num"`
|
||||
Ptr2 *int `bson:"ptr2"`
|
||||
FFF float64 `bson:"fff"`
|
||||
MDate rfctime.RFC3339NanoTime `bson:"mdate"`
|
||||
}
|
||||
|
||||
coll := W[TestData](&mongo.Collection{})
|
||||
|
||||
coll.init()
|
||||
|
||||
t0 := time.Now()
|
||||
t0 := time.Date(2000, 1, 1, 12, 0, 0, 0, timeext.TimezoneBerlin)
|
||||
t1 := rfctime.NewRFC3339Nano(t0)
|
||||
|
||||
d := TestData{
|
||||
ID: "1",
|
||||
CDate: t0,
|
||||
MDate: t1,
|
||||
Sub: struct {
|
||||
A string `bson:"a"`
|
||||
}{
|
||||
@@ -146,7 +182,7 @@ func TestReflectionGetFieldValueAsTokenString(t *testing.T) {
|
||||
gfvats := func(value TestData, fieldName string) string {
|
||||
v, err := coll.getFieldValueAsTokenString(value, fieldName)
|
||||
if err != nil {
|
||||
t.Errorf("%s", "failed to getTokenValueAsMongoType")
|
||||
t.Errorf("%s: %v", "failed to getTokenValueAsMongoType", err)
|
||||
}
|
||||
return v
|
||||
}
|
||||
@@ -157,4 +193,34 @@ func TestReflectionGetFieldValueAsTokenString(t *testing.T) {
|
||||
tst.AssertEqual(t, gfvats(d, "ptr"), "4")
|
||||
tst.AssertEqual(t, gfvats(d, "ptr2"), "")
|
||||
tst.AssertEqual(t, gfvats(d, "fff"), "22.5")
|
||||
tst.AssertEqual(t, gfvats(d, "cdate"), t0.Format(time.RFC3339Nano))
|
||||
tst.AssertEqual(t, gfvats(d, "mdate"), t0.Format(time.RFC3339Nano))
|
||||
}
|
||||
|
||||
func TestReflectionWithInterface(t *testing.T) {
|
||||
|
||||
type TestData struct {
|
||||
ID primitive.ObjectID `bson:"_id"`
|
||||
CDate time.Time `bson:"cdate"`
|
||||
}
|
||||
|
||||
type TestInterface interface {
|
||||
}
|
||||
|
||||
coll1 := W[TestInterface](&mongo.Collection{})
|
||||
|
||||
tst.AssertTrue(t, coll1.coll != nil)
|
||||
tst.AssertEqual(t, 0, len(coll1.implDataTypeMap))
|
||||
|
||||
df := func(ctx context.Context, dec Decodable) (TestInterface, error) {
|
||||
return TestData{}, nil
|
||||
}
|
||||
|
||||
coll2 := W[TestInterface](&mongo.Collection{}).WithDecodeFunc(df, TestData{})
|
||||
|
||||
tst.AssertTrue(t, coll2.coll != nil)
|
||||
tst.AssertEqual(t, 1, len(coll2.implDataTypeMap))
|
||||
|
||||
tst.AssertEqual(t, "ID", coll2.implDataTypeMap[reflect.TypeOf(TestData{})]["_id"].Name)
|
||||
tst.AssertEqual(t, "CDate", coll2.implDataTypeMap[reflect.TypeOf(TestData{})]["cdate"].Name)
|
||||
}
|
||||
|