This commit is contained in:
@@ -0,0 +1,29 @@
|
||||
package cursortoken

import (
	"git.blackforestbytes.com/BlackForestBytes/goext/tst"
	"testing"
)

// TestSortDirectionToMongoASC checks that the ascending direction maps to mongo's +1.
func TestSortDirectionToMongoASC(t *testing.T) {
	dir := SortASC
	tst.AssertEqual(t, dir.ToMongo(), 1)
}

// TestSortDirectionToMongoDESC checks that the descending direction maps to mongo's -1.
func TestSortDirectionToMongoDESC(t *testing.T) {
	dir := SortDESC
	tst.AssertEqual(t, dir.ToMongo(), -1)
}

// TestSortDirectionToMongoEmpty checks that the zero-value direction maps to 0.
func TestSortDirectionToMongoEmpty(t *testing.T) {
	var dir SortDirection
	tst.AssertEqual(t, dir.ToMongo(), 0)
}

// TestSortDirectionToMongoUnknown checks that an unrecognized direction string maps to 0.
func TestSortDirectionToMongoUnknown(t *testing.T) {
	dir := SortDirection("xyz")
	tst.AssertEqual(t, dir.ToMongo(), 0)
}

// TestSortDirectionConstants pins the raw string values of the two direction constants.
func TestSortDirectionConstants(t *testing.T) {
	tst.AssertEqual(t, string(SortASC), "ASC")
	tst.AssertEqual(t, string(SortDESC), "DESC")
}
|
||||
@@ -0,0 +1,136 @@
|
||||
package cursortoken

import (
	"git.blackforestbytes.com/BlackForestBytes/goext/tst"
	"strings"
	"testing"
	"time"
)

// TestStartToken verifies the canonical start token: its serialized form and flags.
func TestStartToken(t *testing.T) {
	startTok := Start()
	tst.AssertEqual(t, startTok.Token(), "@start")
	tst.AssertTrue(t, startTok.IsStart())
	tst.AssertFalse(t, startTok.IsEnd())
}

// TestEndToken verifies the canonical end token: its serialized form and flags.
func TestEndToken(t *testing.T) {
	endTok := End()
	tst.AssertEqual(t, endTok.Token(), "@end")
	tst.AssertTrue(t, endTok.IsEnd())
	tst.AssertFalse(t, endTok.IsStart())
}

// TestNewKeySortTokenBasic verifies a freshly built key-sort token is neither
// start nor end and serializes with the "tok_" prefix.
func TestNewKeySortTokenBasic(t *testing.T) {
	keyTok := NewKeySortToken("alpha", "beta", SortASC, SortDESC, 50, Extra{})
	tst.AssertFalse(t, keyTok.IsEnd())
	tst.AssertFalse(t, keyTok.IsStart())
	encoded := keyTok.Token()
	tst.AssertTrue(t, strings.HasPrefix(encoded, "tok_"))
}

// TestNewKeySortTokenRoundTrip encodes a key-sort token and decodes it again,
// checking every field survives the round trip.
func TestNewKeySortTokenRoundTrip(t *testing.T) {
	src := NewKeySortToken("primary-val", "secondary-val", SortASC, SortDESC, 25, Extra{})

	parsed, err := Decode(src.Token())
	tst.AssertNoErr(t, err)

	keySort, isKeySort := parsed.(CTKeySort)
	tst.AssertTrue(t, isKeySort)
	tst.AssertEqual(t, keySort.ValuePrimary, "primary-val")
	tst.AssertEqual(t, keySort.ValueSecondary, "secondary-val")
	tst.AssertEqual(t, keySort.Direction, SortASC)
	tst.AssertEqual(t, keySort.DirectionSecondary, SortDESC)
	tst.AssertEqual(t, keySort.PageSize, 25)
	tst.AssertEqual(t, keySort.Mode, CTMNormal)
}

// TestKeySortTokenWithExtra round-trips a token carrying all optional Extra fields.
func TestKeySortTokenWithExtra(t *testing.T) {
	stamp := time.Date(2024, 6, 15, 10, 30, 0, 0, time.UTC)
	objID := "object-id-123"
	pageNum := 7
	perPage := 42

	src := NewKeySortToken("p", "s", SortDESC, SortASC, 10, Extra{
		Timestamp: &stamp,
		Id:        &objID,
		Page:      &pageNum,
		PageSize:  &perPage,
	})

	parsed, err := Decode(src.Token())
	tst.AssertNoErr(t, err)

	keySort, isKeySort := parsed.(CTKeySort)
	tst.AssertTrue(t, isKeySort)
	tst.AssertTrue(t, keySort.Extra.Timestamp != nil)
	tst.AssertTrue(t, keySort.Extra.Timestamp.Equal(stamp))
	tst.AssertDeRefEqual(t, keySort.Extra.Id, "object-id-123")
	tst.AssertDeRefEqual(t, keySort.Extra.Page, 7)
	tst.AssertDeRefEqual(t, keySort.Extra.PageSize, 42)
}

// TestKeySortTokenStartRoundTrip checks the start token survives encode+decode.
func TestKeySortTokenStartRoundTrip(t *testing.T) {
	src := Start()
	parsed, err := Decode(src.Token())
	tst.AssertNoErr(t, err)
	tst.AssertTrue(t, parsed.IsStart())
	tst.AssertFalse(t, parsed.IsEnd())
}

// TestKeySortTokenEndRoundTrip checks the end token survives encode+decode.
func TestKeySortTokenEndRoundTrip(t *testing.T) {
	src := End()
	parsed, err := Decode(src.Token())
	tst.AssertNoErr(t, err)
	tst.AssertTrue(t, parsed.IsEnd())
	tst.AssertFalse(t, parsed.IsStart())
}

// TestKeySortTokenEmptyValues round-trips a zero-value key-sort token (only the
// mode set) and checks every field decodes back to its zero value.
func TestKeySortTokenEmptyValues(t *testing.T) {
	src := CTKeySort{Mode: CTMNormal}
	encoded := src.Token()
	tst.AssertTrue(t, strings.HasPrefix(encoded, "tok_"))

	parsed, err := Decode(encoded)
	tst.AssertNoErr(t, err)

	keySort, isKeySort := parsed.(CTKeySort)
	tst.AssertTrue(t, isKeySort)
	tst.AssertEqual(t, keySort.ValuePrimary, "")
	tst.AssertEqual(t, keySort.ValueSecondary, "")
	tst.AssertEqual(t, keySort.Direction, SortDirection(""))
	tst.AssertEqual(t, keySort.DirectionSecondary, SortDirection(""))
	tst.AssertEqual(t, keySort.PageSize, 0)
}

// TestKeySortTokenOnlyTimestamp round-trips a token whose Extra carries only a
// timestamp; the remaining optional fields must stay nil.
func TestKeySortTokenOnlyTimestamp(t *testing.T) {
	stamp := time.Date(2020, 1, 2, 3, 4, 5, 0, time.UTC)
	src := CTKeySort{
		Mode:  CTMNormal,
		Extra: Extra{Timestamp: &stamp},
	}

	parsed, err := Decode(src.Token())
	tst.AssertNoErr(t, err)

	keySort, isKeySort := parsed.(CTKeySort)
	tst.AssertTrue(t, isKeySort)
	tst.AssertTrue(t, keySort.Extra.Timestamp != nil)
	tst.AssertTrue(t, keySort.Extra.Timestamp.Equal(stamp))
	tst.AssertTrue(t, keySort.Extra.Id == nil)
	tst.AssertTrue(t, keySort.Extra.Page == nil)
	tst.AssertTrue(t, keySort.Extra.PageSize == nil)
}

// TestKeySortTokenSpecialChars round-trips values containing spaces,
// punctuation, and non-ASCII characters.
func TestKeySortTokenSpecialChars(t *testing.T) {
	src := NewKeySortToken("hello world / @!#$%", "äöü€", SortASC, SortASC, 1, Extra{})
	parsed, err := Decode(src.Token())
	tst.AssertNoErr(t, err)

	keySort, isKeySort := parsed.(CTKeySort)
	tst.AssertTrue(t, isKeySort)
	tst.AssertEqual(t, keySort.ValuePrimary, "hello world / @!#$%")
	tst.AssertEqual(t, keySort.ValueSecondary, "äöü€")
}
|
||||
@@ -0,0 +1,61 @@
|
||||
package cursortoken

import (
	"git.blackforestbytes.com/BlackForestBytes/goext/tst"
	"testing"
)

// TestPageToken verifies the serialized form and flags of a mid-range page token.
func TestPageToken(t *testing.T) {
	pageTok := Page(5)
	tst.AssertEqual(t, pageTok.Token(), "$5")
	tst.AssertFalse(t, pageTok.IsEnd())
	tst.AssertFalse(t, pageTok.IsStart())
}

// TestPageTokenOne verifies that page 1 is treated as the start token.
func TestPageTokenOne(t *testing.T) {
	pageTok := Page(1)
	tst.AssertEqual(t, pageTok.Token(), "$1")
	tst.AssertFalse(t, pageTok.IsEnd())
	tst.AssertTrue(t, pageTok.IsStart())
}

// TestPageTokenLarge verifies serialization of a large page number.
func TestPageTokenLarge(t *testing.T) {
	pageTok := Page(123456)
	tst.AssertEqual(t, pageTok.Token(), "$123456")
}

// TestPageTokenZero verifies that page 0 serializes as "$0" and is neither
// start nor end.
func TestPageTokenZero(t *testing.T) {
	pageTok := Page(0)
	tst.AssertEqual(t, pageTok.Token(), "$0")
	tst.AssertFalse(t, pageTok.IsEnd())
	tst.AssertFalse(t, pageTok.IsStart())
}

// TestPageEndToken verifies the paginated end token.
func TestPageEndToken(t *testing.T) {
	endTok := PageEnd()
	tst.AssertEqual(t, endTok.Token(), "$end")
	tst.AssertTrue(t, endTok.IsEnd())
	tst.AssertFalse(t, endTok.IsStart())
}

// TestPaginatedStartMode verifies that start mode serializes as page 1
// regardless of the stored Page value.
func TestPaginatedStartMode(t *testing.T) {
	startTok := CTPaginated{Mode: CTMStart, Page: 0}
	tst.AssertEqual(t, startTok.Token(), "$1")
	tst.AssertTrue(t, startTok.IsStart())
	tst.AssertFalse(t, startTok.IsEnd())
}

// TestPaginatedEndMode verifies that end mode serializes as "$end" regardless
// of the stored Page value.
func TestPaginatedEndMode(t *testing.T) {
	endTok := CTPaginated{Mode: CTMEnd, Page: 99}
	tst.AssertEqual(t, endTok.Token(), "$end")
	tst.AssertTrue(t, endTok.IsEnd())
}

// TestPaginatedRoundTrip checks that encode+decode is the identity for a
// sample of page numbers.
func TestPaginatedRoundTrip(t *testing.T) {
	for _, pageNum := range []int{2, 3, 7, 100, 9999} {
		pageTok := Page(pageNum)
		parsed, err := Decode(pageTok.Token())
		tst.AssertNoErr(t, err)
		tst.AssertEqual(t, parsed.Token(), pageTok.Token())
	}
}
|
||||
@@ -0,0 +1,125 @@
|
||||
package cursortoken

import (
	"git.blackforestbytes.com/BlackForestBytes/goext/tst"
	"testing"
)

// TestDecodeEmpty verifies the empty string decodes to the start token.
func TestDecodeEmpty(t *testing.T) {
	parsed, err := Decode("")
	tst.AssertNoErr(t, err)
	tst.AssertTrue(t, parsed.IsStart())
	tst.AssertFalse(t, parsed.IsEnd())
	tst.AssertEqual(t, parsed.Token(), "@start")
}

// TestDecodeAtStart verifies the literal "@start" decodes to the start token.
func TestDecodeAtStart(t *testing.T) {
	parsed, err := Decode("@start")
	tst.AssertNoErr(t, err)
	tst.AssertTrue(t, parsed.IsStart())
	tst.AssertFalse(t, parsed.IsEnd())
}

// TestDecodeAtStartUppercase verifies "@start" matching is case-insensitive.
func TestDecodeAtStartUppercase(t *testing.T) {
	parsed, err := Decode("@START")
	tst.AssertNoErr(t, err)
	tst.AssertTrue(t, parsed.IsStart())
}

// TestDecodeAtStartMixedCase verifies mixed-case "@start" also decodes.
func TestDecodeAtStartMixedCase(t *testing.T) {
	parsed, err := Decode("@StArT")
	tst.AssertNoErr(t, err)
	tst.AssertTrue(t, parsed.IsStart())
}

// TestDecodeAtEnd verifies the literal "@end" decodes to the end token.
func TestDecodeAtEnd(t *testing.T) {
	parsed, err := Decode("@end")
	tst.AssertNoErr(t, err)
	tst.AssertTrue(t, parsed.IsEnd())
	tst.AssertFalse(t, parsed.IsStart())
}

// TestDecodeAtEndUppercase verifies "@end" matching is case-insensitive.
func TestDecodeAtEndUppercase(t *testing.T) {
	parsed, err := Decode("@END")
	tst.AssertNoErr(t, err)
	tst.AssertTrue(t, parsed.IsEnd())
}

// TestDecodeDollarEnd verifies "$end" decodes to a paginated end token.
func TestDecodeDollarEnd(t *testing.T) {
	parsed, err := Decode("$end")
	tst.AssertNoErr(t, err)
	tst.AssertTrue(t, parsed.IsEnd())
	_, isPaginated := parsed.(CTPaginated)
	tst.AssertTrue(t, isPaginated)
}

// TestDecodeDollarEndUppercase verifies "$end" matching is case-insensitive.
func TestDecodeDollarEndUppercase(t *testing.T) {
	parsed, err := Decode("$END")
	tst.AssertNoErr(t, err)
	tst.AssertTrue(t, parsed.IsEnd())
}

// TestDecodeDollarPage verifies a numeric "$N" decodes to a normal-mode
// paginated token with that page number.
func TestDecodeDollarPage(t *testing.T) {
	parsed, err := Decode("$5")
	tst.AssertNoErr(t, err)
	paged, isPaginated := parsed.(CTPaginated)
	tst.AssertTrue(t, isPaginated)
	tst.AssertEqual(t, paged.Page, 5)
	tst.AssertEqual(t, paged.Mode, CTMNormal)
}

// TestDecodeDollarPageOne verifies "$1" decodes to page 1, which counts as start.
func TestDecodeDollarPageOne(t *testing.T) {
	parsed, err := Decode("$1")
	tst.AssertNoErr(t, err)
	tst.AssertTrue(t, parsed.IsStart())
	paged, isPaginated := parsed.(CTPaginated)
	tst.AssertTrue(t, isPaginated)
	tst.AssertEqual(t, paged.Page, 1)
}

// TestDecodeDollarPageInvalid verifies a non-numeric page suffix is rejected.
func TestDecodeDollarPageInvalid(t *testing.T) {
	if _, err := Decode("$abc"); err == nil {
		t.Fatal("expected error for invalid page")
	}
}

// TestDecodeUnknownPrefix verifies input with no recognized prefix is rejected.
func TestDecodeUnknownPrefix(t *testing.T) {
	if _, err := Decode("foobar"); err == nil {
		t.Fatal("expected error for unknown prefix")
	}
}

// TestDecodeInvalidBase32 verifies a "tok_" body that is not valid base32 is rejected.
func TestDecodeInvalidBase32(t *testing.T) {
	if _, err := Decode("tok_!!!"); err == nil {
		t.Fatal("expected error for invalid base32 body")
	}
}

// TestDecodeInvalidJSON verifies a "tok_" body that is valid base32 but does
// not contain valid JSON is rejected.
func TestDecodeInvalidJSON(t *testing.T) {
	if _, err := Decode("tok_NBSWY3DP"); err == nil {
		t.Fatal("expected error for invalid json body")
	}
}

// TestDecodeJustDollar verifies a bare "$" (no page number or "end") is rejected.
func TestDecodeJustDollar(t *testing.T) {
	if _, err := Decode("$"); err == nil {
		t.Fatal("expected error for bare $")
	}
}

// TestDecodeKnownTokenContent verifies re-encoding a decoded token reproduces
// the original encoded string.
func TestDecodeKnownTokenContent(t *testing.T) {
	src := NewKeySortToken("k1", "k2", SortASC, SortDESC, 33, Extra{})
	encoded := src.Token()

	parsed, err := Decode(encoded)
	tst.AssertNoErr(t, err)

	tst.AssertEqual(t, parsed.Token(), encoded)
}
|
||||
Reference in New Issue
Block a user