Compare commits (11 commits)

SHA1:

b2b93f570a
8247fc4524
5dad44ad09
f042183433
b0be93a7a0
1c143921e6
68e63a9cf6
c3162fec95
1124aa781a
eef0e9f2aa
af38b06d22
bfcodegen/_test_example_2.tgz (new binary file, not shown)
@@ -70,9 +70,9 @@ func GenerateCharsetIDSpecs(sourceDir string, destFile string) error {
     newChecksum := cryptext.BytesSha256([]byte(newChecksumStr))

     if newChecksum != oldChecksum {
-        fmt.Printf("[IDGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum)
+        fmt.Printf("[CSIDGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum)
     } else {
-        fmt.Printf("[IDGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum)
+        fmt.Printf("[CSIDGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum)
         return nil
     }

@@ -12,8 +12,8 @@ import (
     "time"
 )

-//go:embed _test_example.tgz
-var CSIDExampleModels []byte
+//go:embed _test_example_1.tgz
+var CSIDExampleModels1 []byte

 func TestGenerateCSIDSpecs(t *testing.T) {

@@ -21,7 +21,7 @@ func TestGenerateCSIDSpecs(t *testing.T) {

     tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())

-    err := os.WriteFile(tmpFile, CSIDExampleModels, 0o777)
+    err := os.WriteFile(tmpFile, CSIDExampleModels1, 0o777)
     tst.AssertNoErr(t, err)

     t.Cleanup(func() { _ = os.Remove(tmpFile) })
@@ -3,6 +3,7 @@ package bfcodegen
 import (
     "bytes"
     _ "embed"
+    "encoding/json"
     "errors"
     "fmt"
     "go/format"
@@ -14,6 +15,7 @@ import (
     "os"
     "path"
     "path/filepath"
+    "reflect"
     "regexp"
     "strings"
     "text/template"
@@ -23,6 +25,8 @@ type EnumDefVal struct {
     VarName     string
     Value       string
     Description *string
+    Data        *map[string]any
+    RawComment  *string
 }

 type EnumDef struct {
@@ -37,7 +41,7 @@ var rexEnumPackage = rext.W(regexp.MustCompile(`^package\s+(?P<name>[A-Za-z0-9_]

 var rexEnumDef = rext.W(regexp.MustCompile(`^\s*type\s+(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*//\s*(@enum:type).*$`))

-var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-.]+"|[0-9]+))\s*(//(?P<descr>.*))?.*$`))
+var rexEnumValueDef = rext.W(regexp.MustCompile(`^\s*(?P<name>[A-Za-z0-9_]+)\s+(?P<type>[A-Za-z0-9_]+)\s*=\s*(?P<value>("[A-Za-z0-9_:\s\-.]+"|[0-9]+))\s*(//(?P<comm>.*))?.*$`))

 var rexEnumChecksumConst = rext.W(regexp.MustCompile(`const ChecksumEnumGenerator = "(?P<cs>[A-Za-z0-9_]*)"`))

@@ -46,11 +50,6 @@ var templateEnumGenerateText string

 func GenerateEnumSpecs(sourceDir string, destFile string) error {

-    files, err := os.ReadDir(sourceDir)
-    if err != nil {
-        return err
-    }
-
     oldChecksum := "N/A"
     if _, err := os.Stat(destFile); !os.IsNotExist(err) {
         content, err := os.ReadFile(destFile)
@@ -62,6 +61,30 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
         }
     }

+    gocode, _, changed, err := _generateEnumSpecs(sourceDir, destFile, oldChecksum, true)
+    if err != nil {
+        return err
+    }
+
+    if !changed {
+        return nil
+    }
+
+    err = os.WriteFile(destFile, []byte(gocode), 0o755)
+    if err != nil {
+        return err
+    }
+
+    return nil
+}
+
+func _generateEnumSpecs(sourceDir string, destFile string, oldChecksum string, gofmt bool) (string, string, bool, error) {
+
+    files, err := os.ReadDir(sourceDir)
+    if err != nil {
+        return "", "", false, err
+    }
+
 files = langext.ArrFilter(files, func(v os.DirEntry) bool { return v.Name() != path.Base(destFile) })
 files = langext.ArrFilter(files, func(v os.DirEntry) bool { return strings.HasSuffix(v.Name(), ".go") })
 files = langext.ArrFilter(files, func(v os.DirEntry) bool { return !strings.HasSuffix(v.Name(), "_gen.go") })
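The refactor above splits the generator in two: the exported GenerateEnumSpecs still writes destFile (and only when the checksum over the source files has changed), while the new internal _generateEnumSpecs returns the generated source, the new checksum and a changed flag without touching the filesystem. A minimal usage sketch of the exported entry point; the ./models paths are invented for illustration:

package main

import "gogs.mikescher.com/BlackForestBytes/goext/bfcodegen"

func main() {
    // Scan ./models for types marked with an `// @enum:type` comment and
    // (re)write the generated helper file, but only if the checksum changed.
    err := bfcodegen.GenerateEnumSpecs("./models", "./models/enums_gen.go")
    if err != nil {
        panic(err)
    }
}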
@@ -71,7 +94,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
     for _, f := range files {
         content, err := os.ReadFile(path.Join(sourceDir, f.Name()))
         if err != nil {
-            return err
+            return "", "", false, err
         }
         newChecksumStr += "\n" + f.Name() + "\t" + cryptext.BytesSha256(content)
     }
@@ -82,7 +105,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
         fmt.Printf("[EnumGenerate] Checksum has changed ( %s -> %s ), will generate new file\n\n", oldChecksum, newChecksum)
     } else {
         fmt.Printf("[EnumGenerate] Checksum unchanged ( %s ), nothing to do\n", oldChecksum)
-        return nil
+        return "", oldChecksum, false, nil
     }

     allEnums := make([]EnumDef, 0)
@@ -93,7 +116,7 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
         fmt.Printf("========= %s =========\n\n", f.Name())
         fileEnums, pn, err := processEnumFile(sourceDir, path.Join(sourceDir, f.Name()))
         if err != nil {
-            return err
+            return "", "", false, err
         }

         fmt.Printf("\n")
@@ -106,20 +129,21 @@ func GenerateEnumSpecs(sourceDir string, destFile string) error {
     }

     if pkgname == "" {
-        return errors.New("no package name found in any file")
+        return "", "", false, errors.New("no package name found in any file")
     }

-    fdata, err := format.Source([]byte(fmtEnumOutput(newChecksum, allEnums, pkgname)))
+    rdata := fmtEnumOutput(newChecksum, allEnums, pkgname)
+
+    if !gofmt {
+        return rdata, newChecksum, true, nil
+    }
+
+    fdata, err := format.Source([]byte(rdata))
     if err != nil {
-        return err
+        return "", "", false, err
     }

-    err = os.WriteFile(destFile, fdata, 0o755)
-    if err != nil {
-        return err
-    }
-
-    return nil
+    return string(fdata), newChecksum, true, nil
 }

 func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) {
@@ -171,10 +195,34 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) {

         if match, ok := rexEnumValueDef.MatchFirst(line); ok {
             typename := match.GroupByName("type").Value()
+
+            comment := match.GroupByNameOrEmpty("comm").ValueOrNil()
+            var descr *string = nil
+            var data *map[string]any = nil
+            if comment != nil {
+                comment = langext.Ptr(strings.TrimSpace(*comment))
+                if strings.HasPrefix(*comment, "{") {
+                    if v, ok := tryParseDataComment(*comment); ok {
+                        data = &v
+                        if anyDataDescr, ok := v["description"]; ok {
+                            if dataDescr, ok := anyDataDescr.(string); ok {
+                                descr = &dataDescr
+                            }
+                        }
+                    } else {
+                        descr = comment
+                    }
+                } else {
+                    descr = comment
+                }
+            }
+
             def := EnumDefVal{
                 VarName:     match.GroupByName("name").Value(),
                 Value:       match.GroupByName("value").Value(),
-                Description: match.GroupByNameOrEmpty("descr").ValueOrNil(),
+                RawComment:  comment,
+                Description: descr,
+                Data:        data,
             }

             found := false
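To illustrate the new parsing branch: a trailing value comment that (after trimming) starts with "{" is parsed as a flat JSON object; its optional "description" key becomes the Description, the whole object is stored in Data, and the raw text is kept in RawComment. Comments that are not JSON keep behaving as plain descriptions. A hypothetical source file (type, values and keys are invented) that this path would pick up:

package models

type JobState string // @enum:type

const (
    JobStateQueued  JobState = "QUEUED"  // {"description": "waiting for a worker", "color": "gray"}
    JobStateRunning JobState = "RUNNING" // {"description": "currently executing", "color": "yellow"}
    JobStateFailed  JobState = "FAILED"  // {"description": "aborted with an error", "color": "red"}
)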
@@ -199,6 +247,41 @@ func processEnumFile(basedir string, fn string) ([]EnumDef, string, error) {
     return enums, pkgname, nil
 }

+func tryParseDataComment(s string) (map[string]any, bool) {
+
+    r := make(map[string]any)
+
+    err := json.Unmarshal([]byte(s), &r)
+    if err != nil {
+        return nil, false
+    }
+
+    for _, v := range r {
+
+        rv := reflect.ValueOf(v)
+
+        if rv.Kind() == reflect.Ptr && rv.IsNil() {
+            continue
+        }
+        if rv.Kind() == reflect.Bool {
+            continue
+        }
+        if rv.Kind() == reflect.String {
+            continue
+        }
+        if rv.Kind() == reflect.Int64 {
+            continue
+        }
+        if rv.Kind() == reflect.Float64 {
+            continue
+        }
+
+        return nil, false
+    }
+
+    return r, true
+}
+
 func fmtEnumOutput(cs string, enums []EnumDef, pkgname string) string {

     templ := template.New("enum-generate")
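For reference, the kind checks above line up with what encoding/json produces when unmarshalling into a map[string]any: JSON scalars arrive as bool, string or float64, while nested objects and arrays arrive as map[string]any and []any and therefore cause the comment to be rejected as a data comment. A small standalone illustration, independent of this package:

package main

import (
    "encoding/json"
    "fmt"
    "reflect"
)

func main() {
    var m map[string]any
    err := json.Unmarshal([]byte(`{"ok":true,"color":"red","weight":3,"nested":{"x":1}}`), &m)
    if err != nil {
        panic(err)
    }
    for k, v := range m {
        // e.g. ok -> bool, color -> string, weight -> float64, nested -> map
        fmt.Printf("%-8s -> %v\n", k, reflect.ValueOf(v).Kind())
    }
}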
@@ -211,6 +294,47 @@ func fmtEnumOutput(cs string, enums []EnumDef, pkgname string) string {
         "hasDescr": func(v EnumDef) bool {
             return langext.ArrAll(v.Values, func(val EnumDefVal) bool { return val.Description != nil })
         },
+        "hasData": func(v EnumDef) bool {
+            return len(v.Values) > 0 && langext.ArrAll(v.Values, func(val EnumDefVal) bool { return val.Data != nil })
+        },
+        "gostr": func(v any) string {
+            return fmt.Sprintf("%#+v", v)
+        },
+        "goobj": func(name string, v any) string {
+            return fmt.Sprintf("%#+v", v)
+        },
+        "godatakey": func(v string) string {
+            return strings.ToUpper(v[0:1]) + v[1:]
+        },
+        "godatavalue": func(v any) string {
+            return fmt.Sprintf("%#+v", v)
+        },
+        "godatatype": func(v any) string {
+            return fmt.Sprintf("%T", v)
+        },
+        "mapindex": func(v map[string]any, k string) any {
+            return v[k]
+        },
+        "generalDataKeys": func(v EnumDef) map[string]string {
+            r0 := make(map[string]int)
+
+            for _, eval := range v.Values {
+                for k := range *eval.Data {
+                    if ctr, ok := r0[k]; ok {
+                        r0[k] = ctr + 1
+                    } else {
+                        r0[k] = 1
+                    }
+                }
+            }
+
+            r1 := langext.MapToArr(r0)
+            r2 := langext.ArrFilter(r1, func(p langext.MapEntry[string, int]) bool { return p.Value == len(v.Values) })
+            r3 := langext.ArrMap(r2, func(p langext.MapEntry[string, int]) string { return p.Key })
+            r4 := langext.ArrToKVMap(r3, func(p string) string { return p }, func(p string) string { return fmt.Sprintf("%T", (*v.Values[0].Data)[p]) })
+
+            return r4
+        },
     })

     templ = template.Must(templ.Parse(templateEnumGenerateText))
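The new template helpers are thin formatting wrappers: godatakey upper-cases the first letter of a JSON key so it can serve as an exported Go field name, godatavalue renders a parsed JSON value as a Go literal via %#+v, and godatatype reports its dynamic Go type via %T. A standalone illustration of those three calls:

package main

import (
    "fmt"
    "strings"
)

func main() {
    key := "color"
    fmt.Println(strings.ToUpper(key[0:1]) + key[1:]) // godatakey:   Color
    fmt.Printf("%#+v\n", "yellow")                   // godatavalue: "yellow"
    fmt.Printf("%#+v\n", 3.5)                        // godatavalue: 3.5
    fmt.Printf("%T\n", 3.5)                          // godatatype:  float64
}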
@@ -11,21 +11,38 @@ const ChecksumEnumGenerator = "{{.Checksum}}" // GoExtVersion: {{.GoextVersion}}

 {{ $hasStr := ( . | hasStr ) }}
 {{ $hasDescr := ( . | hasDescr ) }}
+{{ $hasData := ( . | hasData ) }}

 // ================================ {{.EnumTypeName}} ================================
 //
 // File: {{.FileRelative}}
 // StringEnum: {{$hasStr | boolToStr}}
 // DescrEnum: {{$hasDescr | boolToStr}}
+// DataEnum: {{$hasData | boolToStr}}
 //

+{{ $typename := .EnumTypeName }}
+{{ $enumdef := . }}
+
 var __{{.EnumTypeName}}Values = []{{.EnumTypeName}}{ {{range .Values}}
     {{.VarName}}, {{end}}
 }

 {{if $hasDescr}}
 var __{{.EnumTypeName}}Descriptions = map[{{.EnumTypeName}}]string{ {{range .Values}}
-    {{.VarName}}: "{{.Description | deref | trimSpace}}", {{end}}
+    {{.VarName}}: {{.Description | deref | trimSpace | gostr}}, {{end}}
 }
 {{end}}
+
+{{if $hasData}}
+type {{ .EnumTypeName }}Data struct { {{ range $datakey, $datatype := ($enumdef | generalDataKeys) }}
+    {{ $datakey | godatakey }} {{ $datatype }} `json:"{{ $datakey }}"` {{ end }}
+}
+
+var __{{.EnumTypeName}}Data = map[{{.EnumTypeName}}]{{.EnumTypeName}}Data{ {{range .Values}} {{ $enumvalue := . }}
+    {{.VarName}}: {{ $typename }}Data{ {{ range $datakey, $datatype := $enumdef | generalDataKeys }}
+        {{ $datakey | godatakey }}: {{ (mapindex $enumvalue.Data $datakey) | godatavalue }}, {{ end }}
+    }, {{end}}
+}
+{{end}}

@@ -64,6 +81,15 @@ func (e {{.EnumTypeName}}) Description() string {
 }
 {{end}}

+{{if $hasData}}
+func (e {{.EnumTypeName}}) Data() {{.EnumTypeName}}Data {
+    if d, ok := __{{.EnumTypeName}}Data[e]; ok {
+        return d
+    }
+    return {{.EnumTypeName}}Data{}
+}
+{{end}}
+
 func (e {{.EnumTypeName}}) VarName() string {
     if d, ok := __{{.EnumTypeName}}Varnames[e]; ok {
         return d
@@ -75,6 +101,12 @@ func (e {{.EnumTypeName}}) Meta() enums.EnumMetaValue {
     {{if $hasDescr}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: langext.Ptr(e.Description())} {{else}} return enums.EnumMetaValue{VarName: e.VarName(), Value: e, Description: nil} {{end}}
 }

+{{if $hasDescr}}
+func (e {{.EnumTypeName}}) DescriptionMeta() enums.EnumDescriptionMetaValue {
+    return enums.EnumDescriptionMetaValue{VarName: e.VarName(), Value: e, Description: e.Description()}
+}
+{{end}}
+
 func Parse{{.EnumTypeName}}(vv string) ({{.EnumTypeName}}, bool) {
     for _, ev := range __{{.EnumTypeName}}Values {
         if string(ev) == vv {
@@ -94,4 +126,12 @@ func {{.EnumTypeName}}ValuesMeta() []enums.EnumMetaValue {
     }
 }

+{{if $hasDescr}}
+func {{.EnumTypeName}}ValuesDescriptionMeta() []enums.EnumDescriptionMetaValue {
+    return []enums.EnumDescriptionMetaValue{ {{range .Values}}
+        {{.VarName}}.DescriptionMeta(), {{end}}
+    }
+}
+{{end}}
+
 {{end}}
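Putting the template additions together: for an enum whose values all carry a data comment (hasData requires Data on every value), the generated file would additionally contain, roughly, a per-enum data struct, a lookup map and a Data() accessor. A sketch of that output for the hypothetical JobState example above; field order and formatting are approximate, and gofmt normalises the real output anyway:

type JobStateData struct {
    Color       string `json:"color"`
    Description string `json:"description"`
}

var __JobStateData = map[JobState]JobStateData{
    JobStateQueued:  JobStateData{Color: "gray", Description: "waiting for a worker"},
    JobStateRunning: JobStateData{Color: "yellow", Description: "currently executing"},
    JobStateFailed:  JobStateData{Color: "red", Description: "aborted with an error"},
}

func (e JobState) Data() JobStateData {
    if d, ok := __JobStateData[e]; ok {
        return d
    }
    return JobStateData{}
}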
@@ -12,8 +12,11 @@ import (
     "time"
 )

-//go:embed _test_example.tgz
-var EnumExampleModels []byte
+//go:embed _test_example_1.tgz
+var EnumExampleModels1 []byte

+//go:embed _test_example_2.tgz
+var EnumExampleModels2 []byte
+
 func TestGenerateEnumSpecs(t *testing.T) {

@@ -21,7 +24,7 @@ func TestGenerateEnumSpecs(t *testing.T) {

     tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())

-    err := os.WriteFile(tmpFile, EnumExampleModels, 0o777)
+    err := os.WriteFile(tmpFile, EnumExampleModels1, 0o777)
     tst.AssertNoErr(t, err)

     t.Cleanup(func() { _ = os.Remove(tmpFile) })
@@ -34,17 +37,53 @@ func TestGenerateEnumSpecs(t *testing.T) {
     _, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
     tst.AssertNoErr(t, err)

-    err = GenerateEnumSpecs(tmpDir, tmpDir+"/enums_gen.go")
+    s1, cs1, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true)
     tst.AssertNoErr(t, err)

-    err = GenerateEnumSpecs(tmpDir, tmpDir+"/enums_gen.go")
+    s2, cs2, _, err := _generateEnumSpecs(tmpDir, "", "N/A", true)
     tst.AssertNoErr(t, err)

+    tst.AssertEqual(t, cs1, cs2)
+    tst.AssertEqual(t, s1, s2)
+
     fmt.Println()
     fmt.Println()
     fmt.Println()
     fmt.Println("=====================================================================================================")
-    fmt.Println(string(tst.Must(os.ReadFile(tmpDir + "/enums_gen.go"))(t)))
+    fmt.Println(s1)
+    fmt.Println("=====================================================================================================")
+    fmt.Println()
+    fmt.Println()
+    fmt.Println()
+}
+
+func TestGenerateEnumSpecsData(t *testing.T) {
+
+    tmpFile := filepath.Join(t.TempDir(), langext.MustHexUUID()+".tgz")
+
+    tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())
+
+    err := os.WriteFile(tmpFile, EnumExampleModels2, 0o777)
+    tst.AssertNoErr(t, err)
+
+    t.Cleanup(func() { _ = os.Remove(tmpFile) })
+
+    err = os.Mkdir(tmpDir, 0o777)
+    tst.AssertNoErr(t, err)
+
+    t.Cleanup(func() { _ = os.RemoveAll(tmpFile) })
+
+    _, err = cmdext.Runner("tar").Arg("-xvzf").Arg(tmpFile).Arg("-C").Arg(tmpDir).FailOnExitCode().FailOnTimeout().Timeout(time.Minute).Run()
+    tst.AssertNoErr(t, err)
+
+    s1, _, _, err := _generateEnumSpecs(tmpDir, "", "", true)
+    tst.AssertNoErr(t, err)
+
+    fmt.Println()
+    fmt.Println()
+    fmt.Println()
+    fmt.Println("=====================================================================================================")
+    fmt.Println(s1)
     fmt.Println("=====================================================================================================")
     fmt.Println()
     fmt.Println()
@@ -12,8 +12,8 @@ import (
     "time"
 )

-//go:embed _test_example.tgz
-var IDExampleModels []byte
+//go:embed _test_example_1.tgz
+var IDExampleModels1 []byte

 func TestGenerateIDSpecs(t *testing.T) {

@@ -21,7 +21,7 @@ func TestGenerateIDSpecs(t *testing.T) {

     tmpDir := filepath.Join(t.TempDir(), langext.MustHexUUID())

-    err := os.WriteFile(tmpFile, IDExampleModels, 0o777)
+    err := os.WriteFile(tmpFile, IDExampleModels1, 0o777)
     tst.AssertNoErr(t, err)

     t.Cleanup(func() { _ = os.Remove(tmpFile) })
@@ -15,10 +15,17 @@ type StringEnum interface {
 type DescriptionEnum interface {
     Enum
     Description() string
+    DescriptionMeta() EnumDescriptionMetaValue
 }

 type EnumMetaValue struct {
     VarName     string  `json:"varName"`
-    Value       any     `json:"value"`
+    Value       Enum    `json:"value"`
     Description *string `json:"description"`
 }
+
+type EnumDescriptionMetaValue struct {
+    VarName     string `json:"varName"`
+    Value       Enum   `json:"value"`
+    Description string `json:"description"`
+}
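EnumMetaValue.Value is now typed as Enum rather than any, and description-enums additionally expose DescriptionMeta(). A consumer-side sketch, assuming the package is importable as goext/enums (the import path and the helper name are assumptions, not taken from this diff):

package demo

import (
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/enums"
)

// printDescription accepts any generated description-enum.
func printDescription(e enums.DescriptionEnum) {
    m := e.DescriptionMeta()
    fmt.Printf("%s = %v (%s)\n", m.VarName, m.Value, m.Description)
}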
@@ -6,8 +6,10 @@ import (
     "github.com/rs/zerolog/log"
     "gogs.mikescher.com/BlackForestBytes/goext/langext"
     "gogs.mikescher.com/BlackForestBytes/goext/mathext"
+    "gogs.mikescher.com/BlackForestBytes/goext/rext"
     "net"
     "net/http"
+    "regexp"
     "strings"
     "time"
 )
@@ -126,8 +128,8 @@ func (w *GinWrapper) DebugPrintRoutes() {
         line := [4]string{
             spec.Method,
             spec.URL,
-            strings.Join(spec.Middlewares, " -> "),
-            spec.Handler,
+            strings.Join(langext.ArrMap(spec.Middlewares, w.cleanMiddlewareName), " -> "),
+            w.cleanMiddlewareName(spec.Handler),
         }

         lines = append(lines, line)
@@ -138,12 +140,40 @@ func (w *GinWrapper) DebugPrintRoutes() {
         pad[3] = mathext.Max(pad[3], len(line[3]))
     }

+    fmt.Printf("Gin-Routes:\n")
+    fmt.Printf("{\n")
     for _, line := range lines {

-        fmt.Printf("Gin-Route: %s %s --> %s --> %s\n",
+        fmt.Printf("  %s %s --> %s --> %s\n",
             langext.StrPadRight("["+line[0]+"]", " ", pad[0]+2),
             langext.StrPadRight(line[1], " ", pad[1]),
             langext.StrPadRight(line[2], " ", pad[2]),
             langext.StrPadRight(line[3], " ", pad[3]))
     }
+    fmt.Printf("}\n")
+}
+
+func (w *GinWrapper) cleanMiddlewareName(fname string) string {
+
+    funcSuffix := rext.W(regexp.MustCompile(`\.func[0-9]+(?:\.[0-9]+)*$`))
+    if match, ok := funcSuffix.MatchFirst(fname); ok {
+        fname = fname[:len(fname)-match.FullMatch().Length()]
+    }
+
+    if strings.HasSuffix(fname, ".(*GinRoutesWrapper).WithJSONFilter") {
+        fname = "[JSONFilter]"
+    }
+
+    if fname == "ginext.BodyBuffer" {
+        fname = "[BodyBuffer]"
+    }
+
+    skipPrefixes := []string{"api.(*Handler).", "api.", "ginext.", "handler.", "admin-app.", "employee-app.", "employer-app."}
+    for _, pfx := range skipPrefixes {
+        if strings.HasPrefix(fname, pfx) {
+            fname = fname[len(pfx):]
+        }
+    }
+
+    return fname
 }
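The route listing now runs every middleware and handler name through cleanMiddlewareName, which strips the runtime's ".funcN" closure suffix and a handful of well-known prefixes before printing. The suffix handling in isolation, shown on an invented handler name:

package main

import (
    "fmt"
    "regexp"
)

func main() {
    // Same pattern as funcSuffix above; "api.(*Handler).ListJobs.func1" is a made-up example.
    re := regexp.MustCompile(`\.func[0-9]+(?:\.[0-9]+)*$`)
    fmt.Println(re.ReplaceAllString("api.(*Handler).ListJobs.func1", ""))
    // Output: api.(*Handler).ListJobs   (the "api.(*Handler)." prefix is then stripped as well)
}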
@@ -3,11 +3,9 @@ package ginext
 import (
     "github.com/gin-gonic/gin"
     "gogs.mikescher.com/BlackForestBytes/goext/langext"
-    "gogs.mikescher.com/BlackForestBytes/goext/rext"
     "net/http"
     "path"
     "reflect"
-    "regexp"
     "runtime"
     "strings"
 )
@@ -196,12 +194,6 @@ func nameOfFunction(f any) string {
         fname = fname[:len(fname)-len("-fm")]
     }

-    suffix := rext.W(regexp.MustCompile(`\.func[0-9]+(?:\.[0-9]+)*$`))
-
-    if match, ok := suffix.MatchFirst(fname); ok {
-        fname = fname[:len(fname)-match.FullMatch().Length()]
-    }
-
     return fname
 }

go.mod (2 changes)
@@ -7,7 +7,7 @@ require (
     github.com/jmoiron/sqlx v1.3.5
     github.com/rs/xid v1.5.0
     github.com/rs/zerolog v1.31.0
-    go.mongodb.org/mongo-driver v1.13.0
+    go.mongodb.org/mongo-driver v1.13.1
     golang.org/x/crypto v0.16.0
     golang.org/x/sys v0.15.0
     golang.org/x/term v0.15.0
go.sum (2 changes)
@@ -126,6 +126,8 @@ go.mongodb.org/mongo-driver v1.12.1 h1:nLkghSU8fQNaK7oUmDhQFsnrtcoNy7Z6LVFKsEecq
 go.mongodb.org/mongo-driver v1.12.1/go.mod h1:/rGBTebI3XYboVmgz+Wv3Bcbl3aD0QF9zl6kDDw18rQ=
 go.mongodb.org/mongo-driver v1.13.0 h1:67DgFFjYOCMWdtTEmKFpV3ffWlFnh+CYZ8ZS/tXWUfY=
 go.mongodb.org/mongo-driver v1.13.0/go.mod h1:/rGBTebI3XYboVmgz+Wv3Bcbl3aD0QF9zl6kDDw18rQ=
+go.mongodb.org/mongo-driver v1.13.1 h1:YIc7HTYsKndGK4RFzJ3covLz1byri52x0IoMB0Pt/vk=
+go.mongodb.org/mongo-driver v1.13.1/go.mod h1:wcDf1JBCXy2mOW0bWHwO/IOYqdca1MPCwDtFu/Z9+eo=
 golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
 golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y=
 golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
@@ -1,5 +1,5 @@
 package goext

-const GoextVersion = "0.0.334"
+const GoextVersion = "0.0.345"

-const GoextVersionTimestamp = "2023-12-05T19:23:27+0100"
+const GoextVersionTimestamp = "2023-12-07T19:39:31+0100"
@@ -29,6 +29,14 @@ func ArrToMap[T comparable, V any](a []V, keyfunc func(V) T) map[T]V {
     return result
 }

+func ArrToKVMap[T any, K comparable, V any](a []T, keyfunc func(T) K, valfunc func(T) V) map[K]V {
+    result := make(map[K]V, len(a))
+    for _, v := range a {
+        result[keyfunc(v)] = valfunc(v)
+    }
+    return result
+}
+
 func ArrToSet[T comparable](a []T) map[T]bool {
     result := make(map[T]bool, len(a))
     for _, v := range a {
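The new ArrToKVMap builds a map from a slice by deriving both key and value from each element. A small usage sketch:

package main

import (
    "fmt"

    "gogs.mikescher.com/BlackForestBytes/goext/langext"
)

func main() {
    words := []string{"red", "green", "blue"}
    // key = the word itself, value = its length
    m := langext.ArrToKVMap(words, func(s string) string { return s }, func(s string) int { return len(s) })
    fmt.Println(m) // map[blue:4 green:5 red:3]
}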