create migration script for old data

This commit is contained in:
2023-01-15 06:30:30 +01:00
parent 82bc887767
commit 89fd0dfed7
34 changed files with 1617 additions and 58 deletions

View File

@@ -5,6 +5,8 @@ _build
DOCKER_GIT_INFO
scn_export.dat
scn_export.json
##############

View File

@@ -5,7 +5,7 @@
<file url="PROJECT" dialect="SQLite" />
</component>
<component name="SqlResolveMappings">
<file url="file://$PROJECT_DIR$/db/impl/primary/database.go" scope="{&quot;node&quot;:{ &quot;@negative&quot;:&quot;1&quot;, &quot;group&quot;:{ &quot;@kind&quot;:&quot;root&quot;, &quot;node&quot;:{ &quot;name&quot;:{ &quot;@qname&quot;:&quot;b3228d61-4c36-41ce-803f-63bd80e198b3&quot; }, &quot;group&quot;:{ &quot;@kind&quot;:&quot;schema&quot;, &quot;node&quot;:{ &quot;name&quot;:{ &quot;@qname&quot;:&quot;schema_3.0.ddl&quot; } } } } } }}" />
<file url="PROJECT" scope="{&quot;node&quot;:{ &quot;@negative&quot;:&quot;1&quot;, &quot;group&quot;:{ &quot;@kind&quot;:&quot;root&quot;, &quot;node&quot;:{ &quot;name&quot;:{ &quot;@qname&quot;:&quot;b3228d61-4c36-41ce-803f-63bd80e198b3&quot; }, &quot;group&quot;:{ &quot;@kind&quot;:&quot;schema&quot;, &quot;node&quot;:{ &quot;name&quot;:{ &quot;@qname&quot;:&quot;schema_3.0.ddl&quot; } } } } } }}" />
<file url="file://$PROJECT_DIR$" scope="{&quot;node&quot;:{ &quot;@negative&quot;:&quot;1&quot;, &quot;group&quot;:{ &quot;@kind&quot;:&quot;root&quot;, &quot;node&quot;:{ &quot;name&quot;:{ &quot;@qname&quot;:&quot;b3228d61-4c36-41ce-803f-63bd80e198b3&quot; }, &quot;group&quot;:{ &quot;@kind&quot;:&quot;schema&quot;, &quot;node&quot;:{ &quot;name&quot;:{ &quot;@qname&quot;:&quot;schema_3.0.ddl&quot; } } } } } }}" />
<file url="PROJECT" scope="" />
</component>
</project>

View File

@@ -71,4 +71,7 @@ fmt:
test:
go test ./test/...
migrate:
CGO_ENABLED=1 go build -v -o _build/scn_migrate -tags "timetzdata sqlite_fts5 sqlite_foreign_keys" ./cmd/migrate
./_build/scn_migrate

View File

@@ -9,6 +9,8 @@
- finish tests (!)
- migration script for existing data
apply local deletion in (my) app
delete excessive dockerwatch messages (directly in db?)
- app-store link in HTML
@@ -43,6 +45,8 @@
(or add another /kuma endpoint)
-> https://webhook.site/
- endpoint to list all servernames of user (distinct select)
#### PERSONAL
- in my script: use `srvname` for sendername

View File

@@ -7,6 +7,7 @@ import (
"errors"
"github.com/gin-gonic/gin"
"github.com/rs/zerolog/log"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"net/http"
"regexp"
"strings"
@@ -14,15 +15,15 @@ import (
type WebsiteHandler struct {
app *logic.Application
rexTemplate *regexp.Regexp
rexConfig *regexp.Regexp
rexTemplate rext.Regex
rexConfig rext.Regex
}
func NewWebsiteHandler(app *logic.Application) WebsiteHandler {
return WebsiteHandler{
app: app,
rexTemplate: regexp.MustCompile("{{template\\|[A-Za-z0-9_\\-\\[\\].]+}}"),
rexConfig: regexp.MustCompile("{{config\\|[A-Za-z0-9_\\-.]+}}"),
rexTemplate: rext.W(regexp.MustCompile("{{template\\|[A-Za-z0-9_\\-\\[\\].]+}}")),
rexConfig: rext.W(regexp.MustCompile("{{config\\|[A-Za-z0-9_\\-.]+}}")),
}
}
@@ -77,17 +78,19 @@ func (h WebsiteHandler) CSS(g *gin.Context) ginresp.HTTPResponse {
}
func (h WebsiteHandler) serveAsset(g *gin.Context, fn string, repl bool) ginresp.HTTPResponse {
data, err := website.Assets.ReadFile(fn)
_data, err := website.Assets.ReadFile(fn)
if err != nil {
return ginresp.Status(http.StatusNotFound)
}
data := string(_data)
if repl {
failed := false
data = h.rexTemplate.ReplaceAllFunc(data, func(match []byte) []byte {
data = h.rexTemplate.ReplaceAllFunc(data, func(match string) string {
prefix := len("{{template|")
suffix := len("}}")
fnSub := string(match[prefix : len(match)-suffix])
fnSub := match[prefix : len(match)-suffix]
fnSub = strings.ReplaceAll(fnSub, "[theme]", h.getTheme(g))
@@ -96,23 +99,23 @@ func (h WebsiteHandler) serveAsset(g *gin.Context, fn string, repl bool) ginresp
log.Error().Str("templ", string(match)).Str("fnSub", fnSub).Str("source", fn).Msg("Failed to replace template")
failed = true
}
return subdata
return string(subdata)
})
if failed {
return ginresp.InternalError(errors.New("template replacement failed"))
}
data = h.rexConfig.ReplaceAllFunc(data, func(match []byte) []byte {
data = h.rexConfig.ReplaceAllFunc(data, func(match string) string {
prefix := len("{{config|")
suffix := len("}}")
cfgKey := match[prefix : len(match)-suffix]
cval, ok := h.getReplConfig(string(cfgKey))
cval, ok := h.getReplConfig(cfgKey)
if !ok {
log.Error().Str("templ", string(match)).Str("source", fn).Msg("Failed to replace config")
log.Error().Str("templ", match).Str("source", fn).Msg("Failed to replace config")
failed = true
}
return []byte(cval)
return cval
})
if failed {
return ginresp.InternalError(errors.New("config replacement failed"))
@@ -138,7 +141,7 @@ func (h WebsiteHandler) serveAsset(g *gin.Context, fn string, repl bool) ginresp
mime = "image/svg+xml"
}
return ginresp.Data(http.StatusOK, mime, data)
return ginresp.Data(http.StatusOK, mime, []byte(data))
}
func (h WebsiteHandler) getReplConfig(key string) (string, bool) {

View File

@@ -0,0 +1,871 @@
package main
import (
scn "blackforestbytes.com/simplecloudnotifier"
"blackforestbytes.com/simplecloudnotifier/logic"
"blackforestbytes.com/simplecloudnotifier/models"
"bufio"
"context"
"encoding/json"
"fmt"
_ "github.com/go-sql-driver/mysql"
"github.com/jmoiron/sqlx"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"gogs.mikescher.com/BlackForestBytes/goext/sq"
"os"
"regexp"
"strings"
"time"
)
// OldUser is a row of the legacy MySQL `users` table (schema v2).
type OldUser struct {
	UserId            int64      `db:"user_id"`
	UserKey           string     `db:"user_key"` // single legacy key; becomes the admin key in the new schema (see migrateUser)
	FcmToken          *string    `db:"fcm_token"` // nullable; may also hold the literal "BLACKLISTED"
	MessagesSent      int64      `db:"messages_sent"`
	TimestampCreated  time.Time  `db:"timestamp_created"`
	TimestampAccessed *time.Time `db:"timestamp_accessed"` // nullable
	QuotaToday        int64      `db:"quota_today"`
	QuotaDay          *time.Time `db:"quota_day"` // nullable
	IsPro             bool       `db:"is_pro"`
	ProToken          *string    `db:"pro_token"` // nullable; prefixed with "ANDROID|v1|" during migration
}
// OldMessage is a row of the legacy MySQL `messages` table (schema v2).
type OldMessage struct {
	ScnMessageId  int64     `db:"scn_message_id"`
	SenderUserId  int64     `db:"sender_user_id"`
	TimestampReal time.Time `db:"timestamp_real"`
	Ack           []uint8   `db:"ack"` // expected to be exactly one byte: 1 = acked, 0 = not acked (see migrateUser)
	Title         string    `db:"title"`
	Content       *string   `db:"content"` // nullable
	Priority      int64     `db:"priority"`
	Sendtime      int64     `db:"sendtime"` // unix seconds (multiplied by 1000 and compared against UnixMilli in migrateUser)
	FcmMessageId  *string   `db:"fcm_message_id"` // nullable
	UsrMessageId  *string   `db:"usr_message_id"` // nullable; user-supplied message id, blanks are normalized to nil
}
// SCNExport mirrors the structure of the app-side export file (scn_export.json).
type SCNExport struct {
	Messages []SCNExportMessage `json:"cmessagelist"`
}

// SCNExportMessage is a single message entry in the app export; only the
// message id is needed (to detect messages locally deleted in the app).
type SCNExportMessage struct {
	MessageID int64 `json:"scnid"`
}
// main migrates all data from the legacy MySQL database into a freshly
// created sqlite database.
//
// It removes leftover sqlite artifacts from a previous run, creates and
// migrates the new database, connects to the old MySQL instance (connection
// string from $SQL_CONN_STR, or prompted interactively on stdin), loads the
// app-side export (scn_export.json), and then migrates every user in turn.
func main() {
	ctx := context.Background()

	conf, _ := scn.GetConfig("local-host") // NOTE(review): second result ignored -- presumably "local-host" always resolves; confirm
	conf.DBMain.File = ".run-data/migrate_main.sqlite3"
	conf.DBMain.EnableLogger = false

	// Start from a clean slate: delete the sqlite db and its WAL/SHM sidecars.
	for _, fn := range []string{
		".run-data/migrate_main.sqlite3",
		".run-data/migrate_main.sqlite3-shm",
		".run-data/migrate_main.sqlite3-wal",
	} {
		if _, err := os.Stat(fn); err == nil {
			if err := os.Remove(fn); err != nil {
				panic(err)
			}
		}
	}

	sqlite, err := logic.NewDBPool(conf)
	if err != nil {
		panic(err)
	}

	err = sqlite.Migrate(ctx)
	if err != nil {
		panic(err)
	}

	connstr := os.Getenv("SQL_CONN_STR")
	if connstr == "" {
		scanner := bufio.NewScanner(os.Stdin)

		fmt.Print("Enter DB URL [127.0.0.1:3306]: ")
		scanner.Scan()
		host := scanner.Text()
		if host == "" {
			host = "127.0.0.1:3306"
		}

		fmt.Print("Enter DB Username [root]: ")
		scanner.Scan()
		username := scanner.Text()
		if username == "" { // fixed: previously re-checked `host`, so the "root" default was never applied
			username = "root"
		}

		fmt.Print("Enter DB Password []: ")
		scanner.Scan()
		pass := scanner.Text() // empty password is a valid value, no default needed

		connstr = fmt.Sprintf("%s:%s@tcp(%s)", username, pass, host)
	}

	_dbold, err := sqlx.Open("mysql", connstr+"/simple_cloud_notifier?parseTime=true")
	if err != nil {
		panic(err)
	}
	dbold := sq.NewDB(_dbold)

	rowsUser, err := dbold.Query(ctx, "SELECT * FROM users", sq.PP{})
	if err != nil {
		panic(err)
	}

	var export SCNExport
	exfn, err := os.ReadFile("scn_export.json")
	if err != nil { // fixed: read error was previously ignored, unmarshalling nil data instead
		panic(err)
	}
	err = json.Unmarshal(exfn, &export)
	if err != nil {
		panic(err)
	}

	// Set of message ids that still exist in the app export.
	appids := make(map[int64]int64)
	for _, v := range export.Messages {
		appids[v.MessageID] = v.MessageID
	}

	users := make([]OldUser, 0)
	for rowsUser.Next() {
		var u OldUser
		err = rowsUser.StructScan(&u)
		if err != nil {
			panic(err)
		}
		users = append(users, u)
	}

	fmt.Printf("\n")
	for _, v := range users {
		fmt.Printf("========================================\n")
		fmt.Printf(" MIGRATE USER %d\n", v.UserId)
		fmt.Printf("========================================\n")
		migrateUser(ctx, sqlite.Primary.DB(), dbold, v, appids)
		fmt.Printf("========================================\n")
		fmt.Printf("\n")
		fmt.Printf("\n")
	}

	err = sqlite.Stop(context.Background())
	if err != nil {
		panic(err)
	}
}
// rexTitleChannel matches legacy message titles of the form "[channel] title"
// so the channel name can be extracted from the title during migration.
var rexTitleChannel = rext.W(regexp.MustCompile("^\\[(?P<channel>[A-Za-z\\-0-9_ ]+)] (?P<title>(.|\\r|\\n)+)$"))

// usedFCM records which FCM tokens already got a client created, so a token
// shared by multiple old users results in only a single client row.
var usedFCM = make(map[string]models.ClientID)
// migrateUser copies one legacy user and everything attached to it from the
// old MySQL database (dbold) into the new sqlite database (dbnew).
//
// Steps, in order (any failure panics):
//  1. load all messages of the user from the old db
//  2. insert the user row -- the old single user_key becomes the new admin
//     key, read/send keys are freshly generated -- plus a compat_ids mapping
//  3. optionally create an ANDROID client from the FCM token (skipped for
//     "BLACKLISTED" tokens and tokens already used by an earlier user)
//  4. create the "main" channel and a subscription to it
//  5. insert every message, auto-creating channels from a "[channel] title"
//     prefix, skipping fast duplicates and (for user 56) messages locally
//     deleted in the app, and creating a delivery row per message based on
//     the old `ack` byte
func migrateUser(ctx context.Context, dbnew sq.DB, dbold sq.DB, user OldUser, appids map[int64]int64) {
	rowsMessages, err := dbold.Query(ctx, "SELECT * FROM messages WHERE sender_user_id = :uid ORDER BY timestamp_real ASC", sq.PP{"uid": user.UserId})
	if err != nil {
		panic(err)
	}

	messages := make([]OldMessage, 0)
	for rowsMessages.Next() {
		var m OldMessage
		err = rowsMessages.StructScan(&m)
		if err != nil {
			panic(err)
		}
		messages = append(messages, m)
	}

	fmt.Printf("Found %d messages\n", len(messages))

	userid := models.NewUserID()

	fmt.Printf("New UserID: %s\n", userid)

	// The old schema had a single key; keep it as the admin key so existing
	// senders keep working, and generate fresh read/send keys.
	readKey := scn.RandomAuthKey()
	sendKey := scn.RandomAuthKey()
	adminKey := user.UserKey

	// Old pro tokens were android-only; prefix them for the new format.
	protoken := user.ProToken
	if protoken != nil {
		protoken = langext.Ptr("ANDROID|v1|" + *protoken)
	}

	_, err = dbnew.Exec(ctx, "INSERT INTO users (user_id, username, read_key, send_key, admin_key, is_pro, pro_token, timestamp_created) VALUES (:uid, :un, :rk, :sk, :ak, :pro, :tok, :ts)", sq.PP{
		"uid": userid,
		"un":  nil,
		"rk":  readKey,
		"sk":  sendKey,
		"ak":  adminKey,
		"pro": langext.Conditional(user.IsPro, 1, 0),
		"tok": protoken,
		"ts":  user.TimestampCreated.UnixMilli(),
	})
	if err != nil {
		panic(err)
	}

	// Remember old-id -> new-id so legacy API calls can still be resolved.
	_, err = dbnew.Exec(ctx, "INSERT INTO compat_ids (old, new, type) VALUES (:old, :new, :typ)", sq.PP{
		"old": user.UserId,
		"new": userid,
		"typ": "userid",
	})
	if err != nil {
		panic(err)
	}

	var clientid *models.ClientID = nil
	if user.FcmToken != nil && *user.FcmToken != "BLACKLISTED" {
		if _, ok := usedFCM[*user.FcmToken]; ok {
			// An earlier user already claimed this token; fcm_token must stay unique.
			fmt.Printf("Skip Creating Client (fcm token reuse)\n")
		} else {
			_clientid := models.NewClientID()
			_, err = dbnew.Exec(ctx, "INSERT INTO clients (client_id, user_id, type, fcm_token, timestamp_created, agent_model, agent_version) VALUES (:cid, :uid, :typ, :fcm, :ts, :am, :av)", sq.PP{
				"cid": _clientid,
				"uid": userid,
				"typ": "ANDROID",
				"fcm": *user.FcmToken,
				"ts":  user.TimestampCreated.UnixMilli(),
				"am":  "[migrated]",
				"av":  "[migrated]",
			})
			if err != nil {
				panic(err)
			}
			fmt.Printf("Created Client %s\n", _clientid)
			clientid = &_clientid
			usedFCM[*user.FcmToken] = _clientid
		}
	}

	// Every user gets a default "main" channel; messages without a
	// "[channel] ..." title prefix end up here.
	mainChannelID := models.NewChannelID()
	_, err = dbnew.Exec(ctx, "INSERT INTO channels (channel_id, owner_user_id, display_name, internal_name, description_name, subscribe_key, send_key, timestamp_created) VALUES (:cid, :ouid, :dnam, :inam, :hnam, :subkey, :sendkey, :ts)", sq.PP{
		"cid":     mainChannelID,
		"ouid":    userid,
		"dnam":    "main",
		"inam":    "main",
		"hnam":    nil,
		"subkey":  scn.RandomAuthKey(),
		"sendkey": scn.RandomAuthKey(),
		"ts":      user.TimestampCreated.UnixMilli(),
	})
	if err != nil {
		panic(err)
	}

	fmt.Printf("Created (Main) Channel [%s]: %s\n", "main", mainChannelID)

	_, err = dbnew.Exec(ctx, "INSERT INTO subscriptions (subscription_id, subscriber_user_id, channel_owner_user_id, channel_internal_name, channel_id, timestamp_created, confirmed) VALUES (:sid, :suid, :ouid, :cnam, :cid, :ts, :conf)", sq.PP{
		"sid": models.NewSubscriptionID(),
		// NOTE(review): suid/ouid insert the legacy numeric id, while the
		// channels insert above uses the new `userid` -- confirm intentional.
		"suid": user.UserId,
		"ouid": user.UserId,
		"cnam": "main",
		"cid":  mainChannelID,
		"ts":   user.TimestampCreated.UnixMilli(),
		"conf": true,
	})
	if err != nil {
		panic(err)
	}

	// Channels auto-created from title prefixes, keyed by internal name.
	channelMap := make(map[string]models.ChannelID)

	// State of the previous message, used for fast-duplicate detection below.
	lastTitle := ""
	lastChannel := models.NewChannelID()
	lastContent := langext.Ptr("")
	lastSendername := langext.Ptr("")
	lastTimestamp := time.Time{}

	for _, oldmessage := range messages {
		messageid := models.NewMessageID()

		title := oldmessage.Title
		channelInternalName := "main"
		channelID := mainChannelID

		// Normalize blank user-message-ids to NULL.
		if oldmessage.UsrMessageId != nil && strings.TrimSpace(*oldmessage.UsrMessageId) == "" {
			oldmessage.UsrMessageId = nil
		}

		// Titles of the form "[channel] title" move the message into a
		// dedicated channel (auto-created on first use). "[status] ..."
		// titles are intentionally kept in "main".
		if match, ok := rexTitleChannel.MatchFirst(title); ok {
			chanNameTitle := match.GroupByName("channel").Value()
			if strings.HasPrefix(chanNameTitle, "VBOARD ERROR") {
				chanNameTitle = "VBOARD-ERROR"
			}
			if chanNameTitle != "status" {
				title = match.GroupByName("title").Value()
				dummyApp := logic.Application{}
				dispName := dummyApp.NormalizeChannelDisplayName(chanNameTitle)
				intName := dummyApp.NormalizeChannelInternalName(chanNameTitle)
				if v, ok := channelMap[intName]; ok {
					channelID = v
					channelInternalName = intName
				} else {
					channelID = models.NewChannelID()
					channelInternalName = intName
					_, err = dbnew.Exec(ctx, "INSERT INTO channels (channel_id, owner_user_id, display_name, internal_name, description_name, subscribe_key, send_key, timestamp_created) VALUES (:cid, :ouid, :dnam, :inam, :hnam, :subkey, :sendkey, :ts)", sq.PP{
						"cid":     channelID,
						"ouid":    userid,
						"dnam":    dispName,
						"inam":    intName,
						"hnam":    nil,
						"subkey":  scn.RandomAuthKey(),
						"sendkey": scn.RandomAuthKey(),
						"ts":      oldmessage.TimestampReal.UnixMilli(),
					})
					if err != nil {
						panic(err)
					}
					_, err = dbnew.Exec(ctx, "INSERT INTO subscriptions (subscription_id, subscriber_user_id, channel_owner_user_id, channel_internal_name, channel_id, timestamp_created, confirmed) VALUES (:sid, :suid, :ouid, :cnam, :cid, :ts, :conf)", sq.PP{
						"sid": models.NewSubscriptionID(),
						// NOTE(review): legacy ids again, see subscription insert above.
						"suid": user.UserId,
						"ouid": user.UserId,
						"cnam": intName,
						"cid":  channelID,
						"ts":   oldmessage.TimestampReal.UnixMilli(),
						"conf": true,
					})
					if err != nil {
						panic(err)
					}
					channelMap[intName] = channelID
					fmt.Printf("Auto Created Channel [%s]: %s\n", dispName, channelID)
				}
			}
		}

		sendername := determineSenderName(user, oldmessage, title, oldmessage.Content, channelInternalName)

		// Fast-duplicate detection: identical title/channel/content/sender
		// within 5 seconds of the previous message is skipped.
		if lastTitle == title && channelID == lastChannel &&
			langext.PtrEquals(lastContent, oldmessage.Content) &&
			langext.PtrEquals(lastSendername, sendername) && oldmessage.TimestampReal.Sub(lastTimestamp) < 5*time.Second {
			lastTitle = title
			lastChannel = channelID
			lastContent = oldmessage.Content
			lastSendername = sendername
			lastTimestamp = oldmessage.TimestampReal
			fmt.Printf("Skip message [%d] \"%s\" (fast-duplicate)\n", oldmessage.ScnMessageId, oldmessage.Title)
			continue
		}

		// Only keep a client-supplied send time if it differs from the real
		// timestamp (old schema stored seconds, new schema milliseconds).
		var sendTimeMillis *int64 = nil
		if oldmessage.Sendtime > 0 && (oldmessage.Sendtime*1000) != oldmessage.TimestampReal.UnixMilli() {
			sendTimeMillis = langext.Ptr(oldmessage.Sendtime * 1000)
		}

		// NOTE(review): hard-coded special case -- for user 56, messages from
		// id 15729 on are only migrated if they still exist in the app export
		// (i.e. were not locally deleted). Verify these magic numbers.
		if user.UserId == 56 && oldmessage.ScnMessageId >= 15729 {
			if _, ok := appids[oldmessage.ScnMessageId]; !ok {
				lastTitle = title
				lastChannel = channelID
				lastContent = oldmessage.Content
				lastSendername = sendername
				lastTimestamp = oldmessage.TimestampReal
				fmt.Printf("Skip message [%d] \"%s\" (locally deleted in app)\n", oldmessage.ScnMessageId, oldmessage.Title)
				continue
			}
		}

		pp := sq.PP{
			"mid":  messageid,
			"suid": userid,
			// NOTE(review): owner_user_id gets the legacy id while
			// sender_user_id gets the new one -- confirm intentional.
			"ouid": user.UserId,
			"cnam": channelInternalName,
			"cid":  channelID,
			"tsr":  oldmessage.TimestampReal.UnixMilli(),
			"tsc":  sendTimeMillis,
			"tit":  title,
			"cnt":  oldmessage.Content,
			"prio": oldmessage.Priority,
			"umid": oldmessage.UsrMessageId,
			"ip":   "",
			"snam": sendername,
		}

		_, err = dbnew.Exec(ctx, "INSERT INTO messages (message_id, sender_user_id, owner_user_id, channel_internal_name, channel_id, timestamp_real, timestamp_client, title, content, priority, usr_message_id, sender_ip, sender_name) VALUES (:mid, :suid, :ouid, :cnam, :cid, :tsr, :tsc, :tit, :cnt, :prio, :umid, :ip, :snam)", pp)
		if err != nil {
			// Dump the parameters before panicking to ease debugging.
			jv, _ := json.MarshalIndent(pp, "", "  ")
			fmt.Printf("%s", string(jv))
			panic(err)
		}

		_, err = dbnew.Exec(ctx, "INSERT INTO compat_ids (old, new, type) VALUES (:old, :new, :typ)", sq.PP{
			"old": oldmessage.ScnMessageId,
			"new": messageid,
			"typ": "messageid",
		})
		if err != nil {
			panic(err)
		}

		// The old `ack` column is a single byte: 1 = delivered, 0 = failed.
		// A delivery row is only created if a client was created above
		// (clientid != nil also guarantees user.FcmToken != nil here).
		if len(oldmessage.Ack) == 1 && oldmessage.Ack[0] == 1 {
			if clientid != nil {
				_, err = dbnew.Exec(ctx, "INSERT INTO deliveries (delivery_id, message_id, receiver_user_id, receiver_client_id, timestamp_created, timestamp_finalized, status, fcm_message_id, next_delivery) VALUES (:did, :mid, :ruid, :rcid, :tsc, :tsf, :stat, :fcm, :next)", sq.PP{
					"did": models.NewDeliveryID(),
					"mid": messageid,
					// NOTE(review): legacy id, see notes above.
					"ruid": user.UserId,
					"rcid": *clientid,
					"tsc":  oldmessage.TimestampReal.UnixMilli(),
					"tsf":  oldmessage.TimestampReal.UnixMilli(),
					"stat": models.DeliveryStatusSuccess,
					"fcm":  *user.FcmToken,
					"next": nil,
				})
				if err != nil {
					panic(err)
				}
			}
		} else if len(oldmessage.Ack) == 1 && oldmessage.Ack[0] == 0 {
			if clientid != nil {
				_, err = dbnew.Exec(ctx, "INSERT INTO deliveries (delivery_id, message_id, receiver_user_id, receiver_client_id, timestamp_created, timestamp_finalized, status, fcm_message_id, next_delivery) VALUES (:did, :mid, :ruid, :rcid, :tsc, :tsf, :stat, :fcm, :next)", sq.PP{
					"did": models.NewDeliveryID(),
					"mid": messageid,
					// NOTE(review): legacy id, see notes above.
					"ruid": user.UserId,
					"rcid": *clientid,
					"tsc":  oldmessage.TimestampReal.UnixMilli(),
					"tsf":  oldmessage.TimestampReal.UnixMilli(),
					"stat": models.DeliveryStatusFailed,
					"fcm":  *user.FcmToken,
					"next": nil,
				})
				if err != nil {
					panic(err)
				}
				fmt.Printf("Create failed-delivery for message %d (no ack)\n", oldmessage.ScnMessageId)
			}
		} else {
			panic("cannot parse ack")
		}

		lastTitle = title
		lastChannel = channelID
		lastContent = oldmessage.Content
		lastSendername = sendername
		lastTimestamp = oldmessage.TimestampReal
	}
}
// determineSenderName reconstructs a sender name for migrated messages of
// user 56 (the author's own account) by pattern-matching the channel name,
// title and content against known senders. Returns nil for every other user
// and for messages whose sender cannot be determined.
func determineSenderName(user OldUser, oldmessage OldMessage, title string, content *string, channame string) *string {
	// Sender names are only reconstructed for the author's own account.
	if user.UserId != 56 {
		return nil
	}

	// Fix: content is nullable in the old schema (`db:"content"` is *string),
	// but many branches below dereference it unconditionally. Substitute an
	// empty string so a NULL content cannot panic the migration.
	if content == nil {
		content = langext.Ptr("")
	}

	if channame == "t-ctrl" {
		return langext.Ptr("sbox")
	}
	if channame == "torr" {
		return langext.Ptr("sbox")
	}
	if channame == "yt-dl" {
		return langext.Ptr("mscom")
	}
	if channame == "ncc-upload" {
		return langext.Ptr("mscom")
	}
	if channame == "cron" {
		if strings.Contains(title, "error on bfb") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(title, "error on mscom") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(title, "error on niflheim-3") {
			return langext.Ptr("niflheim-3")
		}
		if strings.Contains(*content, "on mscom") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(*content, "on bfb") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "gogitmirror_cron") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(*content, "comic_downloader") {
			return langext.Ptr("mscom")
		}
	}
	if channame == "sshguard" {
		if strings.Contains(*content, "logged in to mscom") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(*content, "logged in to bfb") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "logged in to statussrv") {
			return langext.Ptr("statussrv")
		}
	}
	if channame == "docker-watch" {
		if strings.Contains(title, "on plantafelstaging") {
			return langext.Ptr("plantafelstaging")
		}
		if strings.Contains(title, "@ mscom") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(title, "@ bfb") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/scn_server:latest") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "archivebox/archivebox:latest") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(*content, "antoniomika/sish:latest") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(*content, "binwiederhier/ntfy:latest") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/kgserver:latest") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/mikescher/kgserver:latest") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(*content, "jenkins/jenkins:lts") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(*content, "mikescher/youtube-dl-viewer:latest") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(*content, "etherpad/etherpad:latest") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(*content, "teamcity_agent") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "teamcity_server") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/inoshop/") {
			return langext.Ptr("inoshop")
		}
		if strings.Contains(*content, "inopart_mongo_") {
			return langext.Ptr("inoshop")
		}
		if strings.Contains(*content, "Image: wkk_") {
			return langext.Ptr("wkk")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/holz100") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/bewirto") {
			return langext.Ptr("bfb-testserver")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/bfb-website") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/bfb/website") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/psycho/backend") {
			return langext.Ptr("bfb-testserver")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/vereinsboard") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/isiproject") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/ar-app-supportchat-server") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/planitec/ar-app-supportchat-server") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "docker_registry") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/balu") && strings.Contains(*content, "prod") {
			return langext.Ptr("lbxprod")
		}
		if strings.Contains(*content, "registry.blackforestbytes.com/balu") && strings.Contains(*content, "dev") {
			return langext.Ptr("lbxdev")
		}
		if strings.Contains(*content, "Server: bfb-testserver") {
			return langext.Ptr("bfb-testserver")
		}
		if strings.Contains(*content, "wptest_") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(title, "balu-db") {
			return langext.Ptr("lbprod")
		}
	}
	if channame == "certbot" {
		// NOTE(review): "Update cert_badennet_main" is checked three times in
		// this branch (here, directly below, and again further down) -- the
		// duplicates are harmless but could be removed.
		if strings.Contains(title, "Update cert_badennet_main") {
			return langext.Ptr("bfb-testserver")
		}
		if strings.Contains(title, "Update cert_badennet_main") {
			return langext.Ptr("bfb-testserver")
		}
		if strings.Contains(title, "Update cert_bfbugs_main") {
			return langext.Ptr("bfb-testserver")
		}
		if strings.Contains(title, "Update bfbugs_0001") {
			return langext.Ptr("bfb-testserver")
		}
		if strings.Contains(title, "Update inoshop_bfb") {
			return langext.Ptr("inoshop")
		}
		if strings.Contains(title, "Update cert_bfb_0001") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(title, "Update cert_bugkultur_0001") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(title, "Update cert_public_0001") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(title, "Update cert_korbers_0001") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(title, "Update cert_wkk_staging_external") {
			return langext.Ptr("wkk")
		}
		if strings.Contains(title, "Update cert_wkk_production_external") {
			return langext.Ptr("wkk")
		}
		if strings.Contains(title, "Update cert_wkk_develop_external") {
			return langext.Ptr("wkk")
		}
		if strings.Contains(title, "Update cert_wkk_internal") {
			return langext.Ptr("wkk")
		}
		if strings.Contains(title, "Update bfb_de_wildcard") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(title, "Update cannonconquest") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(title, "Update isiproject_wildcard") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(title, "Update vereinsboard_demo") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(title, "Update vereinsboard_wildcard") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(title, "Update cert_bewirto_main") {
			return langext.Ptr("bfb-testserver")
		}
		if strings.Contains(title, "Update cert_badennet_main") {
			return langext.Ptr("bfb-testserver")
		}
		if strings.Contains(title, "Update cert_mampfkultur_main") {
			return langext.Ptr("bfb-testserver")
		}
		if strings.Contains(title, "Update cert_psycho_main") {
			return langext.Ptr("bfb-testserver")
		}
		if strings.Contains(*content, "DNS:*.blackforestbytes.com") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "DNS:*.mikescher.com") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(title, "plantafel-digital.de") {
			return langext.Ptr("plan-web-prod")
		}
		if strings.Contains(title, "plantafeldev.de") {
			return langext.Ptr("plantafeldev")
		}
		if strings.Contains(title, "plantafelstaging.de") {
			return langext.Ptr("plantafeldev")
		}
		if strings.Contains(*content, "DNS:*.plantafeldev.de") {
			return langext.Ptr("plantafeldev")
		}
		if strings.Contains(*content, "plantafel-digital.de") {
			return langext.Ptr("plan-web-prod")
		}
		if strings.Contains(*content, "plantafeldev.de") {
			return langext.Ptr("plantafeldev")
		}
		if strings.Contains(*content, "plantafelstaging.de") {
			return langext.Ptr("plantafeldev")
		}
	}
	if channame == "space-warning" {
		if title == "bfb" {
			return langext.Ptr("bfb")
		}
		if title == "mscom" {
			return langext.Ptr("mscom")
		}
		if title == "plan-web-prod" {
			return langext.Ptr("plan-web-prod")
		}
		if title == "statussrv" {
			return langext.Ptr("statussrv")
		}
	}
	if channame == "srv-backup" {
		// Order matters: "Server: bfb-testserver" must be tested before the
		// "Server: bfb" prefix it contains.
		if strings.Contains(*content, "Server: bfb-testserver") {
			return langext.Ptr("bfb-testserver")
		}
		if strings.Contains(*content, "Server: bfb") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(*content, "Server: mscom") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(*content, "Server: statussrv") {
			return langext.Ptr("statussrv")
		}
	}
	if title == "[status] Updating uptime-kuma image" {
		return langext.Ptr("statussrv")
	}
	if channame == "omv-backup" {
		return langext.Ptr("omv")
	}
	if channame == "omv-rcheck" {
		return langext.Ptr("omv")
	}
	if channame == "tfin" {
		return langext.Ptr("sbox")
	}
	if channame == "vboard-error" {
		return langext.Ptr("bfb")
	}
	if channame == "vboard" {
		return langext.Ptr("bfb")
	}
	if channame == "cubox" {
		return langext.Ptr("cubox")
	}
	if channame == "sys" {
		if strings.Contains(title, "h2896063") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(title, "h2516246") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(title, "h2770024") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(title, "Reboot plan-web-prod") {
			return langext.Ptr("plan-web-prod")
		}
		if strings.Contains(title, "Reboot mikescher.com") {
			return langext.Ptr("mscom")
		}
		if strings.Contains(title, "Reboot blackforestbytes.com") {
			return langext.Ptr("bfb")
		}
		if strings.Contains(title, "Reboot plan-web-dev") {
			return langext.Ptr("plan-web-dev")
		}
		if strings.Contains(title, "Reboot plan-web-staging") {
			return langext.Ptr("plan-web-staging")
		}
		if strings.Contains(title, "Reboot virmach-01") {
			return langext.Ptr("statussrv")
		}
		if strings.Contains(title, "Reboot wkk-1") {
			return langext.Ptr("wkk")
		}
		if strings.Contains(title, "Reboot lbxprod") {
			return langext.Ptr("lbxprod")
		}
	}
	if channame == "yt-tvc" {
		return langext.Ptr("mscom")
	}
	if channame == "gdapi" {
		return langext.Ptr("bfb")
	}
	if channame == "ttrss" {
		return langext.Ptr("mscom")
	}
	if title == "NCC Upload failed" || title == "NCC Upload successful" {
		return langext.Ptr("mscom")
	}
	if oldmessage.ScnMessageId == 7975 {
		return langext.Ptr("mscom")
	}
	if strings.Contains(title, "bfbackup job") {
		return langext.Ptr("bfbackup")
	}
	if strings.Contains(title, "Repo migration of /volume1") {
		return langext.Ptr("bfbackup")
	}

	//fmt.Printf("Failed to determine sender of [%d] '%s' '%s'\n", oldmessage.ScnMessageId, oldmessage.Title, langext.Coalesce(oldmessage.Content, "<NULL>"))
	fmt.Printf("Failed to determine sender of [%d] '%s'\n", oldmessage.ScnMessageId, oldmessage.Title)

	return nil
}

View File

@@ -55,6 +55,7 @@ type DBConfig struct {
CheckForeignKeys bool `env:"CHECKFOREIGNKEYS"`
SingleConn bool `env:"SINGLECONNECTION"`
BusyTimeout time.Duration `env:"BUSYTIMEOUT"`
EnableLogger bool `env:"ENABLELOGGER"`
}
var Conf Config
@@ -78,6 +79,7 @@ var configLocHost = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 100 * time.Millisecond,
EnableLogger: true,
},
DBRequests: DBConfig{
File: ".run-data/loc_requests.sqlite3",
@@ -90,6 +92,7 @@ var configLocHost = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
DBLogs: DBConfig{
File: ".run-data/loc_logs.sqlite3",
@@ -102,6 +105,7 @@ var configLocHost = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
RequestTimeout: 16 * time.Second,
RequestMaxRetry: 8,
@@ -147,6 +151,7 @@ var configLocDocker = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 100 * time.Millisecond,
EnableLogger: true,
},
DBRequests: DBConfig{
File: "/data/docker_scn_requests.sqlite3",
@@ -159,6 +164,7 @@ var configLocDocker = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
DBLogs: DBConfig{
File: "/data/docker_scn_logs.sqlite3",
@@ -171,6 +177,7 @@ var configLocDocker = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
RequestTimeout: 16 * time.Second,
RequestMaxRetry: 8,
@@ -215,6 +222,7 @@ var configDev = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 100 * time.Millisecond,
EnableLogger: true,
},
DBRequests: DBConfig{
File: "/data/scn_requests.sqlite3",
@@ -227,6 +235,7 @@ var configDev = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
DBLogs: DBConfig{
File: "/data/scn_logs.sqlite3",
@@ -239,6 +248,7 @@ var configDev = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
RequestTimeout: 16 * time.Second,
RequestMaxRetry: 8,
@@ -283,6 +293,7 @@ var configStag = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 100 * time.Millisecond,
EnableLogger: true,
},
DBRequests: DBConfig{
File: "/data/scn_requests.sqlite3",
@@ -295,6 +306,7 @@ var configStag = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
DBLogs: DBConfig{
File: "/data/scn_logs.sqlite3",
@@ -307,6 +319,7 @@ var configStag = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
RequestTimeout: 16 * time.Second,
RequestMaxRetry: 8,
@@ -351,6 +364,7 @@ var configProd = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 100 * time.Millisecond,
EnableLogger: true,
},
DBRequests: DBConfig{
File: "/data/scn_requests.sqlite3",
@@ -363,6 +377,7 @@ var configProd = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
DBLogs: DBConfig{
File: "/data/scn_logs.sqlite3",
@@ -375,6 +390,7 @@ var configProd = func() Config {
ConnMaxLifetime: 60 * time.Minute,
ConnMaxIdleTime: 60 * time.Minute,
BusyTimeout: 500 * time.Millisecond,
EnableLogger: true,
},
RequestTimeout: 16 * time.Second,
RequestMaxRetry: 8,

View File

@@ -6,6 +6,8 @@ import (
)
type DatabaseImpl interface {
DB() sq.DB
Migrate(ctx context.Context) error
Ping(ctx context.Context) error
BeginTx(ctx context.Context) (sq.Tx, error)

View File

@@ -81,7 +81,7 @@ func (l DBLogger) PostExec(txID *uint16, sqlOriginal string, sqlReal string, par
}
func fmtSQLPrint(sql string) string {
if strings.Contains(sql, ";") {
if strings.Contains(sql, ";") && len(sql) > 1024 {
return "(...multi...)"
}

View File

@@ -6,6 +6,7 @@ import (
"fmt"
"github.com/rs/zerolog/log"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"gogs.mikescher.com/BlackForestBytes/goext/sq"
"regexp"
"strings"
@@ -37,7 +38,7 @@ type DBPreprocessor struct {
cacheQuery map[string]string
}
var regexAlias = regexp.MustCompile("([A-Za-z_\\-0-9]+)\\s+AS\\s+([A-Za-z_\\-0-9]+)")
var regexAlias = rext.W(regexp.MustCompile("([A-Za-z_\\-0-9]+)\\s+AS\\s+([A-Za-z_\\-0-9]+)"))
func NewDBPreprocessor(db sq.DB) (*DBPreprocessor, error) {
@@ -146,8 +147,8 @@ func (pp *DBPreprocessor) PreQuery(ctx context.Context, txID *uint16, sql *strin
newsel := make([]string, 0)
aliasMap := make(map[string]string)
for _, v := range regexAlias.FindAllStringSubmatch(sqlOriginal, idxFrom+len(" FROM")) {
aliasMap[strings.TrimSpace(v[2])] = strings.TrimSpace(v[1])
for _, v := range regexAlias.MatchAll(sqlOriginal) {
aliasMap[strings.TrimSpace(v.GroupByIndex(1).Value())] = strings.TrimSpace(v.GroupByIndex(2).Value())
}
for _, expr := range split {

View File

@@ -42,7 +42,9 @@ func NewLogsDatabase(cfg server.Config) (*Database, error) {
qqdb := sq.NewDB(xdb)
qqdb.AddListener(dbtools.DBLogger{})
if conf.EnableLogger {
qqdb.AddListener(dbtools.DBLogger{})
}
pp, err := dbtools.NewDBPreprocessor(qqdb)
if err != nil {
@@ -56,6 +58,10 @@ func NewLogsDatabase(cfg server.Config) (*Database, error) {
return scndb, nil
}
// DB returns the underlying sq.DB handle wrapped by this Database.
func (db *Database) DB() sq.DB {
	return db.db
}
func (db *Database) Migrate(ctx context.Context) error {
ctx, cancel := context.WithTimeout(context.Background(), 24*time.Second)
defer cancel()

View File

@@ -91,11 +91,12 @@ func (db *Database) CreateChannel(ctx TxContext, userid models.UserID, dispName
channelid := models.NewChannelID()
_, err = tx.Exec(ctx, "INSERT INTO channels (channel_id, owner_user_id, display_name, internal_name, subscribe_key, send_key, timestamp_created) VALUES (:cid, :ouid, :dnam, :inam, :subkey, :sendkey, :ts)", sq.PP{
_, err = tx.Exec(ctx, "INSERT INTO channels (channel_id, owner_user_id, display_name, internal_name, description_name, subscribe_key, send_key, timestamp_created) VALUES (:cid, :ouid, :dnam, :inam, :hnam, :subkey, :sendkey, :ts)", sq.PP{
"cid": channelid,
"ouid": userid,
"dnam": dispName,
"inam": intName,
"hnam": nil,
"subkey": subscribeKey,
"sendkey": sendKey,
"ts": time2DB(now),

View File

@@ -42,7 +42,9 @@ func NewPrimaryDatabase(cfg server.Config) (*Database, error) {
qqdb := sq.NewDB(xdb)
qqdb.AddListener(dbtools.DBLogger{})
if conf.EnableLogger {
qqdb.AddListener(dbtools.DBLogger{})
}
pp, err := dbtools.NewDBPreprocessor(qqdb)
if err != nil {
@@ -56,6 +58,10 @@ func NewPrimaryDatabase(cfg server.Config) (*Database, error) {
return scndb, nil
}
// DB returns the underlying sq.DB handle wrapped by this Database.
func (db *Database) DB() sq.DB {
	return db.db
}
func (db *Database) Migrate(ctx context.Context) error {
ctx, cancel := context.WithTimeout(context.Background(), 24*time.Second)
defer cancel()

View File

@@ -42,7 +42,9 @@ func NewRequestsDatabase(cfg server.Config) (*Database, error) {
qqdb := sq.NewDB(xdb)
qqdb.AddListener(dbtools.DBLogger{})
if conf.EnableLogger {
qqdb.AddListener(dbtools.DBLogger{})
}
pp, err := dbtools.NewDBPreprocessor(qqdb)
if err != nil {
@@ -56,6 +58,10 @@ func NewRequestsDatabase(cfg server.Config) (*Database, error) {
return scndb, nil
}
// DB returns the underlying sq.DB handle wrapped by this Database.
func (db *Database) DB() sq.DB {
	return db.db
}
func (db *Database) Migrate(ctx context.Context) error {
ctx, cancel := context.WithTimeout(context.Background(), 24*time.Second)
defer cancel()

View File

@@ -4,10 +4,12 @@ go 1.19
require (
github.com/gin-gonic/gin v1.8.1
github.com/go-playground/validator/v10 v10.10.0
github.com/go-sql-driver/mysql v1.6.0
github.com/jmoiron/sqlx v1.3.5
github.com/mattn/go-sqlite3 v1.14.16
github.com/rs/zerolog v1.28.0
gogs.mikescher.com/BlackForestBytes/goext v0.0.56
gogs.mikescher.com/BlackForestBytes/goext v0.0.59
gopkg.in/loremipsum.v1 v1.1.0
)
@@ -15,7 +17,6 @@ require (
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-playground/locales v0.14.0 // indirect
github.com/go-playground/universal-translator v0.18.0 // indirect
github.com/go-playground/validator/v10 v10.10.0 // indirect
github.com/goccy/go-json v0.9.7 // indirect
github.com/google/go-cmp v0.5.9 // indirect
github.com/json-iterator/go v1.1.12 // indirect

View File

@@ -73,14 +73,14 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M=
github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0=
github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY=
gogs.mikescher.com/BlackForestBytes/goext v0.0.49 h1:Ro62ZyJW22elAJKT0XlY94LzAv0dVuiI2m0/Hp1xLgk=
gogs.mikescher.com/BlackForestBytes/goext v0.0.49/go.mod h1:ZEXyKUr8t0EKdPN1FYdk0klY7N8OwXxipGE9lWgpVE8=
gogs.mikescher.com/BlackForestBytes/goext v0.0.50 h1:WuhfxFVyywR7J4+hSTTW/wE87aFbGk7q22TGYusPg0s=
gogs.mikescher.com/BlackForestBytes/goext v0.0.50/go.mod h1:ZEXyKUr8t0EKdPN1FYdk0klY7N8OwXxipGE9lWgpVE8=
gogs.mikescher.com/BlackForestBytes/goext v0.0.55 h1:mzX/s+EBhnaRbiz3+6iwDJyJFS0F+jkbssiLDr9eJYY=
gogs.mikescher.com/BlackForestBytes/goext v0.0.55/go.mod h1:ZEXyKUr8t0EKdPN1FYdk0klY7N8OwXxipGE9lWgpVE8=
gogs.mikescher.com/BlackForestBytes/goext v0.0.56 h1:nl+2mP3BmkeB3kT6zFNXqYkOLc3JnFF3m8QwhxZJf2A=
gogs.mikescher.com/BlackForestBytes/goext v0.0.56/go.mod h1:ZEXyKUr8t0EKdPN1FYdk0klY7N8OwXxipGE9lWgpVE8=
gogs.mikescher.com/BlackForestBytes/goext v0.0.57 h1:R5M0Y+4kS6v5GtsXcHlDBYbcfenj1nOmAaNj4XQUous=
gogs.mikescher.com/BlackForestBytes/goext v0.0.57/go.mod h1:ZEXyKUr8t0EKdPN1FYdk0klY7N8OwXxipGE9lWgpVE8=
gogs.mikescher.com/BlackForestBytes/goext v0.0.58 h1:W53yfHhpFQS13zgtzCjfJQ42WG0OORa+kQWKrp+W73Q=
gogs.mikescher.com/BlackForestBytes/goext v0.0.58/go.mod h1:ZEXyKUr8t0EKdPN1FYdk0klY7N8OwXxipGE9lWgpVE8=
gogs.mikescher.com/BlackForestBytes/goext v0.0.59 h1:3bHSjqgty9yp0EIyqwGAb06ZS7bLvm806zRj6j+WOEE=
gogs.mikescher.com/BlackForestBytes/goext v0.0.59/go.mod h1:ZEXyKUr8t0EKdPN1FYdk0klY7N8OwXxipGE9lWgpVE8=
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.4.0 h1:UVQgzMY87xqpKNgb+kDsll2Igd33HszWHFLmpaRMq/8=
golang.org/x/crypto v0.4.0/go.mod h1:3quD/ATkf6oY+rnes5c3ExXTbLc8mueNue5/DoinL80=

View File

@@ -12,8 +12,8 @@ import (
"github.com/gin-gonic/gin/binding"
"github.com/rs/zerolog/log"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"gogs.mikescher.com/BlackForestBytes/goext/syncext"
"math/rand"
"net"
"net/http"
"os"
@@ -24,9 +24,9 @@ import (
"time"
)
var rexWhitespaceStart = regexp.MustCompile("^\\s+")
var rexWhitespaceEnd = regexp.MustCompile("\\s+$")
var rexWhitespaceStart = rext.W(regexp.MustCompile("^\\s+"))
var rexWhitespaceEnd = rext.W(regexp.MustCompile("\\s+$"))
var rexNormalizeUsername = rext.W(regexp.MustCompile("[^[:alnum:]\\-_ ]"))
type Application struct {
Config scn.Config
@@ -154,12 +154,7 @@ func (app *Application) Run() {
}
func (app *Application) GenerateRandomAuthKey() string {
charset := "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
k := ""
for i := 0; i < 64; i++ {
k += string(charset[rand.Int()%len(charset)])
}
return k
return scn.RandomAuthKey()
}
func (app *Application) QuotaMax(ispro bool) int {
@@ -171,6 +166,10 @@ func (app *Application) QuotaMax(ispro bool) int {
}
func (app *Application) VerifyProToken(ctx *AppContext, token string) (bool, error) {
if strings.HasPrefix(token, "ANDROID|v1|") {
subToken := token[len("ANDROID|v2|"):]
return app.VerifyAndroidProToken(ctx, subToken)
}
if strings.HasPrefix(token, "ANDROID|v2|") {
subToken := token[len("ANDROID|v2|"):]
return app.VerifyAndroidProToken(ctx, subToken)
@@ -319,8 +318,8 @@ func (app *Application) GetOrCreateChannel(ctx *AppContext, userid models.UserID
func (app *Application) NormalizeChannelDisplayName(v string) string {
v = strings.TrimSpace(v)
v = rexWhitespaceStart.ReplaceAllString(v, "")
v = rexWhitespaceEnd.ReplaceAllString(v, "")
v = rexWhitespaceStart.RemoveAll(v)
v = rexWhitespaceEnd.RemoveAll(v)
return v
}
@@ -328,17 +327,15 @@ func (app *Application) NormalizeChannelDisplayName(v string) string {
func (app *Application) NormalizeChannelInternalName(v string) string {
v = strings.TrimSpace(v)
v = strings.ToLower(v)
v = rexWhitespaceStart.ReplaceAllString(v, "")
v = rexWhitespaceEnd.ReplaceAllString(v, "")
v = rexWhitespaceStart.RemoveAll(v)
v = rexWhitespaceEnd.RemoveAll(v)
return v
}
func (app *Application) NormalizeUsername(v string) string {
rex := regexp.MustCompile("[^[:alnum:]\\-_ ]")
v = strings.TrimSpace(v)
v = rex.ReplaceAllString(v, "")
v = rexNormalizeUsername.RemoveAll(v)
return v
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/go-playground/validator/v10"
"github.com/rs/zerolog/log"
"gogs.mikescher.com/BlackForestBytes/goext/langext"
"gogs.mikescher.com/BlackForestBytes/goext/rext"
"math/big"
"reflect"
"regexp"
@@ -19,7 +20,7 @@ type EntityID interface {
Prefix() string
Raw() string
CheckString() string
Regex() *regexp.Regexp
Regex() rext.Regex
}
const idlen = 24
@@ -51,8 +52,8 @@ var (
regexRequestID = generateRegex(prefixRequestID)
)
func generateRegex(prefix string) *regexp.Regexp {
return regexp.MustCompile(fmt.Sprintf("^%s[%s]{%d}[%s]{%d}$", prefix, idCharset, idlen-len(prefix)-checklen, idCharset, checklen))
// generateRegex builds the anchored validation pattern for an entity ID
// with the given prefix: the literal prefix, followed by
// (idlen - len(prefix) - checklen) charset characters, followed by
// checklen charset characters (the check suffix).
func generateRegex(prefix string) rext.Regex {
	bodyLen := idlen - len(prefix) - checklen
	pattern := fmt.Sprintf("^%s[%s]{%d}[%s]{%d}$", prefix, idCharset, bodyLen, idCharset, checklen)
	return rext.W(regexp.MustCompile(pattern))
}
func generateCharsetMap() []int {
@@ -179,7 +180,7 @@ func (id UserID) CheckString() string {
return getCheckString(prefixUserID, string(id))
}
func (id UserID) Regex() *regexp.Regexp {
// Regex returns the precompiled pattern that validates a serialized UserID.
func (id UserID) Regex() rext.Regex {
	return regexUserID
}
@@ -211,7 +212,7 @@ func (id ChannelID) CheckString() string {
return getCheckString(prefixChannelID, string(id))
}
func (id ChannelID) Regex() *regexp.Regexp {
// Regex returns the precompiled pattern that validates a serialized ChannelID.
func (id ChannelID) Regex() rext.Regex {
	return regexChannelID
}
@@ -243,7 +244,7 @@ func (id DeliveryID) CheckString() string {
return getCheckString(prefixDeliveryID, string(id))
}
func (id DeliveryID) Regex() *regexp.Regexp {
// Regex returns the precompiled pattern that validates a serialized DeliveryID.
func (id DeliveryID) Regex() rext.Regex {
	return regexDeliveryID
}
@@ -275,7 +276,7 @@ func (id MessageID) CheckString() string {
return getCheckString(prefixMessageID, string(id))
}
func (id MessageID) Regex() *regexp.Regexp {
// Regex returns the precompiled pattern that validates a serialized MessageID.
func (id MessageID) Regex() rext.Regex {
	return regexMessageID
}
@@ -307,7 +308,7 @@ func (id SubscriptionID) CheckString() string {
return getCheckString(prefixSubscriptionID, string(id))
}
func (id SubscriptionID) Regex() *regexp.Regexp {
// Regex returns the precompiled pattern that validates a serialized SubscriptionID.
func (id SubscriptionID) Regex() rext.Regex {
	return regexSubscriptionID
}
@@ -339,7 +340,7 @@ func (id ClientID) CheckString() string {
return getCheckString(prefixClientID, string(id))
}
func (id ClientID) Regex() *regexp.Regexp {
// Regex returns the precompiled pattern that validates a serialized ClientID.
func (id ClientID) Regex() rext.Regex {
	return regexClientID
}
@@ -371,6 +372,6 @@ func (id RequestID) CheckString() string {
return getCheckString(prefixRequestID, string(id))
}
func (id RequestID) Regex() *regexp.Regexp {
// Regex returns the precompiled pattern that validates a serialized RequestID.
func (id RequestID) Regex() rext.Regex {
	return regexRequestID
}

View File

@@ -2,6 +2,7 @@ package server
import (
"gogs.mikescher.com/BlackForestBytes/goext/timeext"
"math/rand"
"time"
)
@@ -12,3 +13,12 @@ func QuotaDayString() string {
func NextDeliveryTimestamp(now time.Time) time.Time {
return now.Add(5 * time.Second)
}
// RandomAuthKey generates a new 64-character key consisting of ASCII
// digits and upper-/lower-case letters.
//
// NOTE(review): this uses math/rand, which is NOT cryptographically
// secure; if these keys protect authentication, crypto/rand should be
// considered instead — TODO confirm the required security level.
func RandomAuthKey() string {
	const charset = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
	// Build into a preallocated byte slice: the original `k += string(...)`
	// loop copied the whole string on every iteration (quadratic work).
	// rand.Intn also avoids the slight modulo bias of rand.Int()%len.
	k := make([]byte, 64)
	for i := range k {
		k[i] = charset[rand.Intn(len(charset))]
	}
	return string(k)
}