package tss

import "fmt"
import "strings"
import "testing"
import "git.tebibyte.media/sashakoshka/goparse"

// TestLexSimple lexes a small input string and checks that the resulting
// token stream matches the expected sequence.
func TestLexSimple (test *testing.T) {
	testString(test,
		`hello #BABE {#Beef}, 384920 #0ab3fc840`,
		tok(Ident, "hello"),
		tok(Color, "BABE"),
		tok(LBrace, "{"),
		tok(Color, "Beef"),
		tok(RBrace, "}"),
		tok(Comma, ","),
		tok(Number, "384920"),
		tok(Color, "0ab3fc840"),
		tok(parse.EOF, ""),
	)}

// testString lexes the given input and compares each emitted token against
// the corresponding expected token, failing the test on any mismatch or on
// a length difference between the two sequences.
func testString (test *testing.T, input string, correct ...parse.Token) {
	lexer := Lex("test.tss", strings.NewReader(input))
	index := 0
	for {
		token, err := lexer.Next()
		if err != nil { test.Fatalf("lexer returned error:\n%v", parse.Format(err)) }
		if index >= len(correct) {
			test.Logf("%d:\t%-16s | !", index, tokStr(token))
			test.Fatalf("index %d greater than %d", index, len(correct))
		}
		correctToken := correct[index]
		test.Logf (
			"%d:\t%-16s | %s",
			index,
			tokStr(token),
			tokStr(correctToken))
		if correctToken.Kind != token.Kind || correctToken.Value != token.Value {
			test.Fatalf("tokens at %d do not match up", index)
		}
		if token.Is(parse.EOF) { break }
		index ++
	}
	if index < len(correct) - 1 {
		test.Fatalf("index %d less than %d", index, len(correct) - 1)
	}
}

// tokStr formats a token for log output, showing its kind name (or a numeric
// fallback for unknown kinds) and its value, if any.
func tokStr (token parse.Token) string {
	name, ok := tokenNames[token.Kind]
	if !ok {
		name = fmt.Sprintf("Token(%d)", token.Kind)
	}
	if token.Value == "" {
		return name
	} else {
		return fmt.Sprintf("%s:\"%s\"", name, token.Value)
	}
}

// tok is a shorthand constructor for expected tokens.
func tok (kind parse.TokenKind, value string) parse.Token {
	return parse.Token {
		Kind: kind,
		Value: value,
	}
}